From 87465eda1cd31f84ad8d60d8b402008298741939 Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Mon, 13 Oct 2025 15:25:58 -0700 Subject: [PATCH 1/7] surface monitors --- poetry.lock | 80 ++- pyproject.toml | 5 +- scripts/test_coverage.sh | 1 + .../_test_unstructured_pyvista_no_pyvista.py | 117 +++++ tests/test_components/test_monitor.py | 40 ++ tests/test_data/test_unstructured_pyvista.py | 299 +++++++++++ tidy3d/__init__.py | 12 + .../components/base_sim/data/monitor_data.py | 96 ++++ tidy3d/components/data/data_array.py | 96 +++- tidy3d/components/data/dataset.py | 83 ++++ tidy3d/components/data/monitor_data.py | 202 +++++++- tidy3d/components/data/unstructured/base.py | 444 +++++++++++------ .../components/data/unstructured/surface.py | 463 ++++++++++++++++++ .../data/unstructured/tetrahedral.py | 72 +-- .../data/unstructured/triangular.py | 16 +- tidy3d/components/medium.py | 17 + tidy3d/components/monitor.py | 179 +++++++ tidy3d/components/simulation.py | 66 +++ .../tcad/data/monitor_data/abstract.py | 80 +-- tidy3d/components/types/__init__.py | 2 + tidy3d/components/types/base.py | 1 + tidy3d/components/viz/__init__.py | 11 +- tidy3d/components/viz/axes_utils.py | 101 ++++ tidy3d/packaging.py | 25 + 24 files changed, 2206 insertions(+), 302 deletions(-) create mode 100644 tests/_test_data/_test_unstructured_pyvista_no_pyvista.py create mode 100644 tests/test_data/test_unstructured_pyvista.py create mode 100644 tidy3d/components/data/unstructured/surface.py diff --git a/poetry.lock b/poetry.lock index 51835bb611..2469e1956f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
[[package]] name = "absl-py" @@ -4503,7 +4503,7 @@ description = "A small Python package for determining appropriate platform-speci optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"dev\" or extra == \"docs\"" +markers = "extra == \"dev\" or extra == \"docs\" or extra == \"pyvista\" or extra == \"heatcharge\"" files = [ {file = "platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85"}, {file = "platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf"}, @@ -4531,6 +4531,29 @@ files = [ dev = ["pre-commit", "tox"] testing = ["coverage", "pytest", "pytest-benchmark"] +[[package]] +name = "pooch" +version = "1.8.2" +description = "A friend to fetch your data files" +optional = true +python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"dev\" or extra == \"pyvista\" or extra == \"heatcharge\"" +files = [ + {file = "pooch-1.8.2-py3-none-any.whl", hash = "sha256:3529a57096f7198778a5ceefd5ac3ef0e4d06a6ddaf9fc2d609b806f25302c47"}, + {file = "pooch-1.8.2.tar.gz", hash = "sha256:76561f0de68a01da4df6af38e9955c4c9d1a5c90da73f7e40276a5728ec83d10"}, +] + +[package.dependencies] +packaging = ">=20.0" +platformdirs = ">=2.5.0" +requests = ">=2.19.0" + +[package.extras] +progress = ["tqdm (>=4.41.0,<5.0.0)"] +sftp = ["paramiko (>=2.7.0)"] +xxhash = ["xxhash (>=1.4.3)"] + [[package]] name = "pre-commit" version = "4.3.0" @@ -5150,6 +5173,34 @@ files = [ {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] +[[package]] +name = "pyvista" +version = "0.46.3" +description = "Easier Pythonic interface to VTK" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"dev\" or extra == \"pyvista\" or extra == \"heatcharge\"" +files = [ + {file = "pyvista-0.46.3-py3-none-any.whl", hash = "sha256:ccb2a09019cad7976cfeeeb4f00fec2087127499ded795bada9a6a39d4bff30e"}, + {file = "pyvista-0.46.3.tar.gz", hash = "sha256:1f8e6e39516d24d93bc227f278841a0d72ec3722959f612cf4616d2720a8afe1"}, +] + +[package.dependencies] +matplotlib = ">=3.0.1" +numpy = ">=1.21.0" +pillow = "*" +pooch = "*" +scooby = ">=0.5.1" +typing-extensions = ">=4.10" +vtk = "<9.4.0 || >9.4.0,<9.4.1 || >9.4.1,<9.6.0" + +[package.extras] +all = ["pyvista[colormaps,io,jupyter]"] +colormaps = ["cmcrameri", "cmocean", "colorcet"] +io = ["imageio", "meshio (>=5.2)"] +jupyter = ["ipywidgets", "jupyter-server-proxy", "nest_asyncio", "trame (>=2.5.2)", "trame-client (>=2.12.7)", "trame-server (>=2.11.7)", "trame-vtk (>=2.5.8)", "trame-vuetify (>=2.3.1)"] + [[package]] name = "pywin32" version = "311" @@ -5993,6 +6044,22 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodest doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest (>=8.0.0)", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "scooby" +version = "0.10.2" +description = "A Great Dane turned Python environment detective" +optional = true +python-versions = ">=3.8" +groups = 
["main"] +markers = "extra == \"dev\" or extra == \"pyvista\" or extra == \"heatcharge\"" +files = [ + {file = "scooby-0.10.2-py3-none-any.whl", hash = "sha256:8aec2f3f7fb541bf2c9795cad43a88c976869248a4c16523f07f366388ffcfff"}, + {file = "scooby-0.10.2.tar.gz", hash = "sha256:974e115877f3dde8f2bbfd68734d761c629f1b27f54cc29aaede1c8303e85379"}, +] + +[package.extras] +cpu = ["mkl", "psutil"] + [[package]] name = "send2trash" version = "1.8.3" @@ -7260,7 +7327,7 @@ description = "VTK is an open-source toolkit for 3D computer graphics, image pro optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"dev\" or extra == \"vtk\" or extra == \"heatcharge\"" +markers = "extra == \"dev\" or extra == \"vtk\" or extra == \"heatcharge\" or extra == \"pyvista\"" files = [ {file = "vtk-9.5.2-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:9ca87122352cf3c8748fee73c48930efa46fe1a868149a1f760bc17e8fae27ba"}, {file = "vtk-9.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6da02d69dcf2d42472ec8c227e6a8406cedea53d3928af97f8d4e776ff89c95f"}, @@ -7463,11 +7530,12 @@ type = ["pytest-mypy"] [extras] design = ["bayesian-optimization", "pygad", "pyswarms"] -dev = ["bayesian-optimization", "cma", "coverage", "devsim", "diff-cover", "dill", "gdstk", "grcwa", "ipython", "ipython", "jinja2", "jupyter", "memory_profiler", "myst-parser", "nbconvert", "nbdime", "nbsphinx", "networkx", "openpyxl", "optax", "pre-commit", "psutil", "pydata-sphinx-theme", "pygad", "pylint", "pyswarms", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", "pytest-xdist", "rtree", "ruff", "sax", "scikit-rf", "signac", "sphinx", "sphinx-book-theme", "sphinx-copybutton", "sphinx-design", "sphinx-favicon", "sphinx-notfound-page", "sphinx-sitemap", "sphinx-tabs", "sphinxemoji", "tmm", "torch", "torch", "tox", "trimesh", "vtk"] +dev = ["bayesian-optimization", "cma", "coverage", "devsim", "diff-cover", "dill", "gdstk", "grcwa", "ipython", "ipython", "jinja2", "jupyter", "memory_profiler", "myst-parser", "nbconvert", "nbdime", "nbsphinx", "networkx", "openpyxl", "optax", "pre-commit", "psutil", "pydata-sphinx-theme", "pygad", "pylint", "pyswarms", "pytest", "pytest-cov", "pytest-env", "pytest-timeout", "pytest-xdist", "pyvista", "rtree", "ruff", "sax", "scikit-rf", "signac", "sphinx", "sphinx-book-theme", "sphinx-copybutton", "sphinx-design", "sphinx-favicon", "sphinx-notfound-page", "sphinx-sitemap", "sphinx-tabs", "sphinxemoji", "tmm", "torch", "torch", "tox", "trimesh", "vtk"] docs = ["cma", "devsim", "gdstk", "grcwa", "ipython", "jinja2", "jupyter", "myst-parser", "nbconvert", "nbdime", "nbsphinx", "openpyxl", "optax", "pydata-sphinx-theme", "pylint", "sax", "signac", "sphinx", "sphinx-book-theme", "sphinx-copybutton", "sphinx-design", "sphinx-favicon", "sphinx-notfound-page", "sphinx-sitemap", "sphinx-tabs", "sphinxemoji", "tmm"] gdstk = ["gdstk"] -heatcharge = ["devsim", "trimesh", "vtk"] +heatcharge = ["devsim", "pyvista", "trimesh", "vtk"] pytorch = ["torch", "torch"] +pyvista = ["pyvista"] ruff = ["ruff"] scikit-rf = ["scikit-rf"] trimesh = ["networkx", "rtree", "trimesh"] @@ -7476,4 +7544,4 @@ vtk = ["vtk"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "591568dd5f68370208830b200214d40918ff9e5a85b8c17a44c86a15b867e3b2" +content-hash = "1d47a718d856c4da540314b51e1801e11a3f9fa2b1f410230a80ad6a753c0834" diff --git a/pyproject.toml b/pyproject.toml index 242ad0003e..664e099aa7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,6 +112,7 @@ signac = { version = "*", 
optional = true } flax = { version = ">=0.8.2", optional = true } sax = { version = "^0.11", optional = true } vtk = { version = ">=9.2.6", optional = true } +pyvista = { version = ">=0.43.0", optional = true } sphinxemoji = { version = "*", optional = true } devsim = { version = "*", optional = true } cma = { version = "*", optional = true } @@ -167,6 +168,7 @@ dev = [ 'trimesh', 'scikit-rf', 'vtk', + 'pyvista', 'devsim', 'cma', 'diff-cover', @@ -206,8 +208,9 @@ gdstk = ["gdstk"] scikit-rf = ["scikit-rf"] trimesh = ["trimesh", "networkx", "rtree"] vtk = ["vtk"] +pyvista = ["pyvista"] ruff = ["ruff"] -heatcharge = ["trimesh", "vtk", "devsim"] +heatcharge = ["trimesh", "vtk", "pyvista", "devsim"] # plugins with extra deps pytorch = ["torch"] design = ["bayesian-optimization", "pygad", "pyswarms"] diff --git a/scripts/test_coverage.sh b/scripts/test_coverage.sh index 43ff2e5671..3e0461a6bd 100755 --- a/scripts/test_coverage.sh +++ b/scripts/test_coverage.sh @@ -4,6 +4,7 @@ set -e pytest -rA --cov tests/ # to test without vtk, one has to restart pytest pytest -rA tests/_test_data/_test_datasets_no_vtk.py +pytest -rA tests/_test_data/_test_unstructured_pyvista_no_pyvista.py pytest --doctest-modules tidy3d/ docs/ coverage xml diff-cover --compare-branch=origin/develop coverage.xml diff --git a/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py b/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py new file mode 100644 index 0000000000..b1c2fde1c0 --- /dev/null +++ b/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py @@ -0,0 +1,117 @@ +"""Tests PyVista visualization without pyvista installed.""" + +from __future__ import annotations + +import builtins + +import pytest + +from ..test_data.test_unstructured_pyvista import ( + test_surface_combined_plot_and_quiver as _test_surface_combined_plot_and_quiver, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_basic as _test_surface_plot_basic, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_customization as _test_surface_plot_customization, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_error_nothing_to_plot as _test_surface_plot_error_nothing_to_plot, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_grid_only as _test_surface_plot_grid_only, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_plotter_reuse as _test_surface_plot_plotter_reuse, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_windowed_parameter as _test_surface_plot_windowed_parameter, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_plot_with_grid as _test_surface_plot_with_grid, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_quiver_basic as _test_surface_quiver_basic, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_quiver_customization as _test_surface_quiver_customization, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_quiver_error_wrong_num_fields as _test_surface_quiver_error_wrong_num_fields, +) +from ..test_data.test_unstructured_pyvista import ( + test_surface_quiver_magnitude_coloring as _test_surface_quiver_magnitude_coloring, +) + + +@pytest.fixture +def hide_pyvista(monkeypatch, request): + """Hide pyvista module to test error handling when not installed.""" + import_orig = builtins.__import__ + + def mocked_import(name, *args, **kwargs): + if name == "pyvista": + raise ImportError + return 
import_orig(name, *args, **kwargs) + + monkeypatch.setattr(builtins, "__import__", mocked_import) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_basic_no_pyvista(): + _test_surface_plot_basic(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_with_grid_no_pyvista(): + _test_surface_plot_with_grid(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_customization_no_pyvista(): + _test_surface_plot_customization(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_error_nothing_to_plot_no_pyvista(): + _test_surface_plot_error_nothing_to_plot(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_grid_only_no_pyvista(): + _test_surface_plot_grid_only(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_plotter_reuse_no_pyvista(): + _test_surface_plot_plotter_reuse(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_quiver_basic_no_pyvista(): + _test_surface_quiver_basic(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_quiver_customization_no_pyvista(): + _test_surface_quiver_customization(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_quiver_error_wrong_num_fields_no_pyvista(): + _test_surface_quiver_error_wrong_num_fields(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_quiver_magnitude_coloring_no_pyvista(): + _test_surface_quiver_magnitude_coloring(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_combined_plot_and_quiver_no_pyvista(): + _test_surface_combined_plot_and_quiver(no_pyvista=True) + + +@pytest.mark.usefixtures("hide_pyvista") +def test_surface_plot_windowed_parameter_no_pyvista(): + _test_surface_plot_windowed_parameter(None, no_pyvista=True) diff --git a/tests/test_components/test_monitor.py b/tests/test_components/test_monitor.py index 725baa5689..f34251ab79 100644 --- a/tests/test_components/test_monitor.py +++ b/tests/test_components/test_monitor.py @@ -445,3 +445,43 @@ def test_monitor_surfaces_from_volume(): # z+ surface assert monitor_surfaces[5].center == (center[0], center[1], center[2] + size[2] / 2.0) assert monitor_surfaces[5].size == (size[0], size[1], 0.0) + + +def test_surface_monitors(): + pec_sphere = td.Structure(geometry=td.Sphere(radius=0.5), medium=td.PECMedium()) + + surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") + + _ = td.Simulation( + size=(2, 2, 2), + structures=[pec_sphere], + monitors=[surf_mnt], + run_time=1e-12, + grid_spec=td.GridSpec.auto(wavelength=1), + ) + + # monitor doesn't overlap any pec structure + with pytest.raises(pydantic.ValidationError): + surf_mnt = td.SurfaceFieldMonitor( + size=(0.2, 1, 1), center=(0.8, 0, 0), freqs=[td.C_0], name="surface" + ) + + _ = td.Simulation( + size=(2, 2, 2), + structures=[pec_sphere], + monitors=[surf_mnt], + run_time=1e-12, + grid_spec=td.GridSpec.auto(wavelength=1), + ) + + # tests warning message + with AssertLogLevel("WARNING"): + surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") + + _ = td.Simulation( + size=(2, 2, 2), + structures=[pec_sphere], + monitors=[surf_mnt], + run_time=1e-12, + grid_spec=td.GridSpec.auto(wavelength=1), + ) diff --git a/tests/test_data/test_unstructured_pyvista.py b/tests/test_data/test_unstructured_pyvista.py new file mode 100644 index 0000000000..681eb9da3f --- 
/dev/null +++ b/tests/test_data/test_unstructured_pyvista.py @@ -0,0 +1,299 @@ +"""Tests PyVista visualization for TriangularSurfaceDataset.""" + +from __future__ import annotations + +import numpy as np +import pytest + +np.random.seed(4) + + +def _create_simple_surface(): + """Create a simple triangulated surface for testing.""" + import tidy3d as td + + # Create a 2x2 grid of triangles forming a pyramid + points = td.PointDataArray( + [ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [0.5, 0.5, 0.5], # Raised center point + ], + coords={"index": np.arange(5), "axis": np.arange(3)}, + ) + + cells = td.CellDataArray( + [ + [0, 1, 4], + [1, 3, 4], + [3, 2, 4], + [2, 0, 4], + ], + coords={"cell_index": np.arange(4), "vertex_index": np.arange(3)}, + ) + + # Scalar field values + values = td.IndexedDataArray( + [1.0, 2.0, 3.0, 4.0, 5.0], + coords={"index": np.arange(5)}, + ) + + return td.TriangularSurfaceDataset(points=points, cells=cells, values=values) + + +def _create_vector_surface(): + """Create a surface with vector field for quiver testing.""" + import tidy3d as td + + # Same geometry as simple surface + points = td.PointDataArray( + [ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [0.5, 0.5, 0.5], + ], + coords={"index": np.arange(5), "axis": np.arange(3)}, + ) + + cells = td.CellDataArray( + [ + [0, 1, 4], + [1, 3, 4], + [3, 2, 4], + [2, 0, 4], + ], + coords={"cell_index": np.arange(4), "vertex_index": np.arange(3)}, + ) + + # Vector field (pointing outward from center) + vectors = td.IndexedDataArray( + [ + [-1.0, -1.0, 0.0], + [1.0, -1.0, 0.0], + [-1.0, 1.0, 0.0], + [1.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + ], + coords={"index": np.arange(5), "axis": np.arange(3)}, + ) + + return td.TriangularSurfaceDataset(points=points, cells=cells, values=vectors) + + +def test_surface_plot_basic(no_pyvista=False): + """Test basic plot functionality.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(show=False) + else: + import pyvista as pv + + plotter = dataset.plot(show=False) + assert isinstance(plotter, pv.Plotter), "Should return PyVista Plotter" + plotter.close() + + +def test_surface_plot_with_grid(no_pyvista=False): + """Test plot with grid overlay.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(grid=True, show=False) + else: + import pyvista as pv + + plotter = dataset.plot(grid=True, grid_color="white", grid_width=2, show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +def test_surface_plot_customization(no_pyvista=False): + """Test plot customization options.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(cmap="plasma", show=False) + else: + import pyvista as pv + + plotter = dataset.plot(cmap="plasma", vmin=0, vmax=6, opacity=0.8, show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +def test_surface_plot_error_nothing_to_plot(no_pyvista=False): + """Test error when both field and grid are False.""" + from tidy3d.exceptions import DataError, Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(field=False, grid=False, show=False) + else: 
+ with pytest.raises(DataError, match="Nothing to plot"): + _ = dataset.plot(field=False, grid=False, show=False) + + +def test_surface_plot_grid_only(no_pyvista=False): + """Test plot with only grid, no field.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(field=False, grid=True, show=False) + else: + import pyvista as pv + + plotter = dataset.plot(field=False, grid=True, show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +def test_surface_plot_plotter_reuse(no_pyvista=False): + """Test reusing plotter for multiple surfaces.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(show=False) + else: + import pyvista as pv + + # Create plotter manually + plotter = pv.Plotter(off_screen=True) + + # Add first surface + result1 = dataset.plot(plotter=plotter, show=False) + + # Add second surface with different styling + result2 = dataset.plot(plotter=plotter, opacity=0.5, show=False) + + assert result1 is plotter, "Should return same plotter" + assert result2 is plotter, "Should return same plotter" + + plotter.close() + + +def test_surface_quiver_basic(no_pyvista=False): + """Test basic quiver plot.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_vector_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.quiver(show=False) + else: + import pyvista as pv + + plotter = dataset.quiver(show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +def test_surface_quiver_customization(no_pyvista=False): + """Test quiver customization options.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_vector_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.quiver(scale=0.2, show=False) + else: + import pyvista as pv + + plotter = dataset.quiver(scale=0.2, downsampling=1, color="red", cbar=False, show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +def test_surface_quiver_error_wrong_num_fields(no_pyvista=False): + """Test error when not exactly 3 fields.""" + from tidy3d.exceptions import DataError, Tidy3dImportError + + # Use scalar dataset (not vector) + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.quiver(show=False) + else: + with pytest.raises(DataError, match="exactly 3 fields"): + _ = dataset.quiver(show=False) + + +def test_surface_quiver_magnitude_coloring(no_pyvista=False): + """Test quiver with magnitude-based coloring.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_vector_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.quiver(color="magnitude", show=False) + else: + import pyvista as pv + + plotter = dataset.quiver(color="magnitude", cmap="viridis", show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +def test_surface_combined_plot_and_quiver(no_pyvista=False): + """Test combining surface plot with vector field on same plotter.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_vector_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.quiver(show=False) + else: + import pyvista as pv + + # Create plotter manually to test combination + plotter = pv.Plotter(off_screen=True) + + # 
Add quiver (in real use case, would add surface of scalar field first) + dataset.quiver(plotter=plotter, show=False) + + assert isinstance(plotter, pv.Plotter) + plotter.close() + + +@pytest.mark.parametrize("windowed", [True, False, None]) +def test_surface_plot_windowed_parameter(windowed, no_pyvista=False): + """Test windowed parameter for display mode control.""" + from tidy3d.exceptions import Tidy3dImportError + + dataset = _create_simple_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(windowed=windowed, show=False) + else: + import pyvista as pv + + plotter = dataset.plot(windowed=windowed, show=False) + assert isinstance(plotter, pv.Plotter) + plotter.close() diff --git a/tidy3d/__init__.py b/tidy3d/__init__.py index 88697d8e17..10500366b4 100644 --- a/tidy3d/__init__.py +++ b/tidy3d/__init__.py @@ -157,7 +157,10 @@ FluxTimeDataArray, HeatDataArray, IndexedDataArray, + IndexedFieldDataArray, + IndexedFieldTimeDataArray, IndexedFieldVoltageDataArray, + IndexedFreqDataArray, IndexedTimeDataArray, IndexedVoltageDataArray, ModeAmpsDataArray, @@ -196,6 +199,7 @@ PermittivityData, ) from .components.data.sim_data import DATA_TYPE_MAP, SimulationData +from .components.data.unstructured.surface import TriangularSurfaceDataset from .components.data.utils import ( TetrahedralGridDataset, TriangularGridDataset, @@ -325,6 +329,8 @@ ModeSolverMonitor, Monitor, PermittivityMonitor, + SurfaceFieldMonitor, + SurfaceFieldTimeMonitor, ) from .components.parameter_perturbation import ( CustomChargePerturbation, @@ -608,7 +614,10 @@ def set_logging_level(level: str) -> None: "HuraySurfaceRoughness", "IndexPerturbation", "IndexedDataArray", + "IndexedFieldDataArray", + "IndexedFieldTimeDataArray", "IndexedFieldVoltageDataArray", + "IndexedFreqDataArray", "IndexedTimeDataArray", "IndexedVoltageDataArray", "InsulatingBC", @@ -728,6 +737,8 @@ def set_logging_level(level: str) -> None: "StructureSimulationBoundary", "StructureStructureInterface", "SubpixelSpec", + "SurfaceFieldMonitor", + "SurfaceFieldTimeMonitor", "SurfaceImpedance", "SurfaceImpedanceFitterParam", "TemperatureBC", @@ -737,6 +748,7 @@ def set_logging_level(level: str) -> None: "Transformed", "TriangleMesh", "TriangularGridDataset", + "TriangularSurfaceDataset", "TwoPhotonAbsorption", "UniformCurrentSource", "UniformGrid", diff --git a/tidy3d/components/base_sim/data/monitor_data.py b/tidy3d/components/base_sim/data/monitor_data.py index 18fb82728a..fd6453c04e 100644 --- a/tidy3d/components/base_sim/data/monitor_data.py +++ b/tidy3d/components/base_sim/data/monitor_data.py @@ -2,12 +2,19 @@ from __future__ import annotations +import copy from abc import ABC +from typing import Literal, Optional, Union +import numpy as np import pydantic.v1 as pd +from xarray import DataArray as XrDataArray from tidy3d.components.base_sim.monitor import AbstractMonitor +from tidy3d.components.data.data_array import SpatialDataArray from tidy3d.components.data.dataset import Dataset +from tidy3d.components.data.utils import UnstructuredGridDatasetType +from tidy3d.components.types import Coordinate, Symmetry class AbstractMonitorData(Dataset, ABC): @@ -25,3 +32,92 @@ class AbstractMonitorData(Dataset, ABC): def symmetry_expanded_copy(self) -> AbstractMonitorData: """Return copy of self with symmetry applied.""" return self.copy() + + +class AbstractUnstructuredMonitorData(AbstractMonitorData, ABC): + """Abstract base class of objects that store data from unstructured monitors.""" + + symmetry: tuple[Symmetry, Symmetry, 
Symmetry] = pd.Field( + (0, 0, 0), + title="Symmetry", + description="Symmetry of the original simulation in x, y, and z.", + ) + + symmetry_center: Coordinate = pd.Field( + (0, 0, 0), + title="Symmetry Center", + description="Symmetry center of the original simulation in x, y, and z.", + ) + + def _symmetry_expanded_copy_base( + self, + property: Union[UnstructuredGridDatasetType, SpatialDataArray], + custom_symmetry: Optional[tuple[Union[Literal[-1, 1], XrDataArray], ...]] = None, + ) -> Union[UnstructuredGridDatasetType, SpatialDataArray]: + """Return the property with symmetry applied.""" + + # no symmetry + if all(sym == 0 for sym in self.symmetry): + return property + + new_property = copy.copy(property) + + mnt_bounds = np.array(self.monitor.bounds) + + if isinstance(new_property, SpatialDataArray): + data_bounds = [ + [np.min(new_property.x), np.min(new_property.y), np.min(new_property.z)], + [np.max(new_property.x), np.max(new_property.y), np.max(new_property.z)], + ] + else: + data_bounds = new_property.bounds + + dims_need_clipping_left = [] + dims_need_clipping_right = [] + for dim in range(3): + # do not expand monitor with zero size along symmetry direction + # this is done because 2d unstructured data does not support this + if self.symmetry[dim] != 0: + symmetry_factor = ( + self.symmetry[dim] if custom_symmetry is None else custom_symmetry[dim] + ) + center = self.symmetry_center[dim] + + if mnt_bounds[1][dim] < data_bounds[0][dim]: + # (note that mnt_bounds[0][dim] < 2 * center - data_bounds[0][dim] will be satisfied based on backend behavior) + # simple reflection + new_property = new_property.reflect( + axis=dim, center=center, reflection_only=True, symmetry=symmetry_factor + ) + elif mnt_bounds[0][dim] < 2 * center - data_bounds[0][dim]: + # expand only if monitor bounds missing data + # if we do expand, simply reflect symmetrically the whole data + new_property = new_property.reflect( + axis=dim, center=center, symmetry=symmetry_factor + ) + + # if it turns out that we expanded too much, we will trim unnecessary data later + if mnt_bounds[0][dim] > 2 * center - data_bounds[1][dim]: + dims_need_clipping_left.append(dim) + + # likewise, if some of original data was only for symmetry expansion, thim excess on the right + if mnt_bounds[1][dim] < data_bounds[1][dim]: + dims_need_clipping_right.append(dim) + + # trim over-expanded data + if len(dims_need_clipping_left) > 0 or len(dims_need_clipping_right) > 0: + # enlarge clipping domain on positive side arbitrary by 1 + # should not matter by how much + clip_bounds = [mnt_bounds[0] - 1, mnt_bounds[1] + 1] + for dim in dims_need_clipping_left: + clip_bounds[0][dim] = mnt_bounds[0][dim] + + for dim in dims_need_clipping_right: + clip_bounds[1][dim] = mnt_bounds[1][dim] + + if isinstance(new_property, SpatialDataArray): + new_property = new_property.sel_inside(clip_bounds) + else: + new_property = new_property.box_clip(bounds=clip_bounds) + + return new_property diff --git a/tidy3d/components/data/data_array.py b/tidy3d/components/data/data_array.py index 97a4d2703c..df1226eeb8 100644 --- a/tidy3d/components/data/data_array.py +++ b/tidy3d/components/data/data_array.py @@ -232,7 +232,7 @@ def to_hdf5_handle(self, f_handle: h5py.File, group_path: str) -> None: sub_group = f_handle.create_group(group_path) sub_group[DATA_ARRAY_VALUE_NAME] = get_static(self.data) for key, val in self.coords.items(): - if val.dtype == " SpatialDataArray: + def reflect( + self, axis: Axis, center: float, reflection_only: bool = False, symmetry: 
float = 1 + ) -> SpatialDataArray: """Reflect data across the plane define by parameters ``axis`` and ``center`` from right to left. Note that the returned data is sorted with respect to spatial coordinates. @@ -744,6 +746,8 @@ def reflect(self, axis: Axis, center: float, reflection_only: bool = False) -> S Location of the reflection plane along its normal direction. reflection_only : bool = False Return only reflected data. + symmetry : float = 1 + Symmetry factor of the reflection. Returns ------- @@ -769,7 +773,7 @@ def reflect(self, axis: Axis, center: float, reflection_only: bool = False) -> S coords[axis] = 2 * center - coords[axis] coords_dict = dict(zip("xyz", coords)) - tmp_arr = SpatialDataArray(sorted_self.data, coords=coords_dict) + tmp_arr = SpatialDataArray(sorted_self.data * symmetry, coords=coords_dict) return tmp_arr.sortby("xyz"[axis]) @@ -785,7 +789,7 @@ def reflect(self, axis: Axis, center: float, reflection_only: bool = False) -> S new_data = np.zeros(shape) - new_data[ind_left[0], ind_left[1], ind_left[2]] = data + new_data[ind_left[0], ind_left[1], ind_left[2]] = data * symmetry new_data[ind_right[0], ind_right[1], ind_right[2]] = data new_coords = np.zeros(shape[axis]) @@ -1518,6 +1522,81 @@ class ImpedanceFreqModeDataArray(ImpedanceArray, FreqModeDataArray): __slots__ = () +class IndexedSurfaceFreqDataArray(DataArray): + """Stores indexed values of scalar fields on the sides of a surface. It is typically used + in conjuction with a ``PointDataArray`` to store point-associated scalar data. + + Example + ------- + >>> surface_side_array = IndexedSurfaceFreqDataArray( + ... (1+1j) * np.random.random((4,2,1)), coords=dict(index=np.arange(4), side=["outside", "inside"], f=[1e9]) + ... ) + """ + + __slots__ = () + _dims = ("index", "side", "f") + + +class IndexedSurfaceTimeDataArray(DataArray): + """Stores indexed values of scalar fields on the sides of a surface. It is typically used + in conjuction with a ``PointDataArray`` to store point-associated scalar data. + + Example + ------- + >>> surface_side_array = IndexedSurfaceTimeDataArray( + ... (1+1j) * np.random.random((4,2,1)), coords=dict(index=np.arange(4), side=["outside", "inside"], f=[1e9]) + ... ) + """ + + __slots__ = () + _dims = ("index", "side", "t") + + +class IndexedFieldDataArray(DataArray): + """Stores indexed values of vector fields in frequency domain. It is typically used + in conjuction with a ``PointDataArray`` to store point-associated vector data. + + Example + ------- + >>> indexed_array = IndexedFieldDataArray( + ... (1+1j) * np.random.random((4,3,1)), coords=dict(index=np.arange(4), axis=np.arange(3), f=[1e9]) + ... ) + """ + + __slots__ = () + _dims = ("index", "side", "axis", "f") + + +class IndexedFieldTimeDataArray(DataArray): + """Stores indexed values of vector fields in time domain. It is typically used + in conjuction with a ``PointDataArray`` to store point-associated vector data. + + Example + ------- + >>> indexed_array = IndexedFieldDataArray( + ... (1+1j) * np.random.random((4,3,1)), coords=dict(index=np.arange(4), axis=np.arange(3), t=[0]) + ... ) + """ + + __slots__ = () + _dims = ("index", "side", "axis", "t") + + +class IndexedFreqDataArray(DataArray): + """Stores indexed values of scalar fields in frequency domain. It is typically used + in conjuction with a ``PointDataArray`` to store point-associated vector data. + + Example + ------- + >>> indexed_array = IndexedFieldDataArray( + ... (1+1j) * np.random.random((4,1)), coords=dict(index=np.arange(4), f=[1e9]) + ... 
) + """ + + __slots__ = () + _dims = ("index", "f") + + def _make_base_result_data_array(result: DataArray) -> IntegralResultTypes: """Helper for creating the proper base result type.""" cls = FreqDataArray @@ -1605,14 +1684,23 @@ def _make_impedance_data_array(result: DataArray) -> ImpedanceResultTypes: ImpedanceFreqDataArray, ImpedanceTimeDataArray, ImpedanceFreqModeDataArray, + IndexedFieldDataArray, + IndexedFieldTimeDataArray, + IndexedFreqDataArray, ] + DATA_ARRAY_MAP = {data_array.__name__: data_array for data_array in DATA_ARRAY_TYPES} IndexedDataArrayTypes = Union[ IndexedDataArray, IndexedVoltageDataArray, + IndexedFieldDataArray, + IndexedFieldTimeDataArray, + IndexedFreqDataArray, IndexedTimeDataArray, IndexedFieldVoltageDataArray, + IndexedSurfaceFreqDataArray, + IndexedSurfaceTimeDataArray, PointDataArray, ] diff --git a/tidy3d/components/data/dataset.py b/tidy3d/components/data/dataset.py index a3b991b33e..0e08a69d7b 100644 --- a/tidy3d/components/data/dataset.py +++ b/tidy3d/components/data/dataset.py @@ -29,6 +29,7 @@ TimeDataArray, TriangleMeshDataArray, ) +from .unstructured.surface import TriangularSurfaceDataset from .zbf import ZBFData DEFAULT_MAX_SAMPLES_PER_STEP = 10_000 @@ -482,6 +483,88 @@ class AuxFieldTimeDataset(AuxFieldDataset): ) +class ElectromagneticSurfaceFieldDataset(AbstractFieldDataset, ABC): + """Stores a collection of E and H fields with x, y, z components on one side of the surface.""" + + E: Optional[TriangularSurfaceDataset] = pd.Field( + None, + title="E", + description="Spatial distribution of the electric field on the one side of the surface.", + ) + + H: Optional[TriangularSurfaceDataset] = pd.Field( + None, + title="H", + description="Spatial distribution of the magnetic field on the one side of the surface.", + ) + + normal: TriangularSurfaceDataset = pd.Field( + ..., + title="Normal", + description="Normal direction of the surface oriented outward from the surface.", + ) + + @property + def field_components(self) -> dict[str, DataArray]: + """Maps the field components to their associated data.""" + fields = { + "E": self.E, + "H": self.H, + } + return {field_name: field for field_name, field in fields.items() if field is not None} + + @property + def intensity(self) -> TriangularSurfaceDataset: + """Return the sum of the squared absolute electric field components.""" + if self.E is None: + raise ValueError( + "Could not calculate intensity: the dataset does not contain E field information." + ) + intensity = self.E.norm(dim="axis") ** 2 + return intensity + + @property + def current_density(self) -> TriangularSurfaceDataset: + """Surface current density.""" + + h_diff = 0 + template = None + # we assume that is data is None it means field is zero on that side (e.g. PEC) + H_inside = self.H.sel(side="inside", drop=True) if "inside" in self.H.side else None + H_outside = self.H.sel(side="outside", drop=True) if "outside" in self.H.side else None + if H_inside is not None: + h_diff += H_inside.values + template = H_inside + if H_outside is not None: + h_diff -= H_outside.values + template = H_outside + + if template is None: + raise ValueError( + "Could not calculate current density: the dataset does not contain H field information." 
+ ) + + return template.updated_copy(values=xr.cross(h_diff, self.normal.values, dim="axis")) + + @property + def grid_locations(self) -> dict[str, str]: + """Maps field components to the string key of their grid locations on the yee lattice.""" + raise RuntimeError("Function 'grid_location' does not apply to surface monitors.") + + @property + def symmetry_eigenvalues(self) -> dict[str, Callable[[Axis], float]]: + """Maps field components to their (positive) symmetry eigenvalues.""" + + return { + "Ex": lambda dim: -1 if (dim == 0) else +1, + "Ey": lambda dim: -1 if (dim == 1) else +1, + "Ez": lambda dim: -1 if (dim == 2) else +1, + "Hx": lambda dim: +1 if (dim == 0) else -1, + "Hy": lambda dim: +1 if (dim == 1) else -1, + "Hz": lambda dim: +1 if (dim == 2) else -1, + } + + class ModeSolverDataset(ElectromagneticFieldDataset): """Dataset storing scalar components of E and H fields as a function of freq. and mode_index. diff --git a/tidy3d/components/data/monitor_data.py b/tidy3d/components/data/monitor_data.py index 9e0a967334..47a4d9a7c0 100644 --- a/tidy3d/components/data/monitor_data.py +++ b/tidy3d/components/data/monitor_data.py @@ -14,8 +14,11 @@ from pandas import DataFrame from xarray.core.types import Self -from tidy3d.components.base import cached_property, skip_if_fields_missing -from tidy3d.components.base_sim.data.monitor_data import AbstractMonitorData +from tidy3d.components.base import TYPE_TAG_STR, cached_property, skip_if_fields_missing +from tidy3d.components.base_sim.data.monitor_data import ( + AbstractMonitorData, + AbstractUnstructuredMonitorData, +) from tidy3d.components.grid.grid import Coords, Grid from tidy3d.components.medium import Medium, MediumType from tidy3d.components.monitor import ( @@ -35,13 +38,14 @@ ModeSolverMonitor, MonitorType, PermittivityMonitor, + SurfaceFieldMonitor, + SurfaceFieldTimeMonitor, ) from tidy3d.components.source.base import Source from tidy3d.components.source.current import CustomCurrentSource, PointDipole from tidy3d.components.source.field import CustomFieldSource, ModeSource, PlaneWave from tidy3d.components.source.time import GaussianPulse, SourceTimeType from tidy3d.components.types import ( - TYPE_TAG_STR, ArrayFloat1D, ArrayFloat2D, Coordinate, @@ -86,12 +90,14 @@ AuxFieldTimeDataset, Dataset, ElectromagneticFieldDataset, + ElectromagneticSurfaceFieldDataset, FieldDataset, FieldTimeDataset, MediumDataset, ModeSolverDataset, PermittivityDataset, ) +from .unstructured.surface import TriangularSurfaceDataset Coords1D = ArrayFloat1D @@ -203,6 +209,8 @@ class AbstractFieldData(MonitorData, AbstractFieldDataset, ABC): PermittivityMonitor, ModeMonitor, MediumMonitor, + SurfaceFieldMonitor, + SurfaceFieldTimeMonitor, ] symmetry: tuple[Symmetry, Symmetry, Symmetry] = pd.Field( @@ -1536,6 +1544,192 @@ class AuxFieldTimeData(AuxFieldTimeDataset, AbstractFieldData): _contains_monitor_fields = enforce_monitor_fields_present() +class ElectromagneticSurfaceFieldData( + MonitorData, AbstractUnstructuredMonitorData, ElectromagneticSurfaceFieldDataset, ABC +): + """Collection of vector fields on a surface with some symmetry properties.""" + + monitor: Union[SurfaceFieldMonitor, SurfaceFieldTimeMonitor] + + _contains_monitor_fields = enforce_monitor_fields_present() + + @property + def symmetry_expanded(self): + """Return the :class:`.ElectromagneticSurfaceFieldData` with fields expanded based on symmetry. If + any symmetry is nonzero (i.e. expanded), the interpolation implicitly creates a copy of the + data array. 
However, if symmetry is not expanded, the returned array contains a view of + the data, not a copy. + + Returns + ------- + :class:`ElectromagneticSurfaceFieldData` + A data object with the symmetry expanded fields. + """ + + if all(sym == 0 for sym in self.symmetry): + return self + + return self._updated(self._symmetry_update_dict) + + @property + def symmetry_expanded_copy(self) -> AbstractFieldData: + """Create a copy of the :class:`.ElectromagneticSurfaceFieldData` with fields expanded based on symmetry. + + Returns + ------- + :class:`ElectromagneticSurfaceFieldData` + A data object with the symmetry expanded fields. + """ + + if all(sym == 0 for sym in self.symmetry): + return self.copy() + + return self.copy(update=self._symmetry_update_dict) + + @property + def _symmetry_update_dict(self) -> dict: + """Dictionary of data fields to create data with expanded symmetry.""" + + h_symmetry = [ + xr.DataArray([1, -1, -1], coords={"axis": [0, 1, 2]}) * self.symmetry[0], + xr.DataArray([-1, 1, -1], coords={"axis": [0, 1, 2]}) * self.symmetry[1], + xr.DataArray([-1, -1, 1], coords={"axis": [0, 1, 2]}) * self.symmetry[2], + ] + + e_symmetry = [ + xr.DataArray([-1, 1, 1], coords={"axis": [0, 1, 2]}) * self.symmetry[0], + xr.DataArray([1, -1, 1], coords={"axis": [0, 1, 2]}) * self.symmetry[1], + xr.DataArray([1, 1, -1], coords={"axis": [0, 1, 2]}) * self.symmetry[2], + ] + + # normal field is always even under symmetry + n_symmetry = [ + xr.DataArray([1, -1, -1], coords={"axis": [0, 1, 2]}), + xr.DataArray([-1, 1, -1], coords={"axis": [0, 1, 2]}), + xr.DataArray([-1, -1, 1], coords={"axis": [0, 1, 2]}), + ] + + updated_dict = {} + if self.E is not None: + updated_dict["E"] = self._symmetry_expanded_copy_base(self.E, e_symmetry) + if self.H is not None: + updated_dict["H"] = self._symmetry_expanded_copy_base(self.H, h_symmetry) + + updated_dict["normal"] = self._symmetry_expanded_copy_base(self.normal, n_symmetry) + return updated_dict + + def _check_fields_stored(self, components: list[str]): + """Check that all requested field components are stored in the data.""" + missing_comps = [comp for comp in components if comp not in self.field_components.keys()] + if len(missing_comps) > 0: + raise DataError( + f"Field components {missing_comps} not included in this data object. Use " + "the 'fields' argument of a field monitor to select which components are stored." + ) + + +class SurfaceFieldData(ElectromagneticSurfaceFieldData): + """ + Data associated with a :class:`.SurfaceFieldMonitor`: E and H fields on a surface. + + Example + ------- + >>> from tidy3d import PointDataArray, IndexedFieldDataArray, TriangularSurfaceDataset, CellDataArray + >>> import tidy3d as td + >>> old_logging_level = td.config.logging_level + >>> td.config.logging_level = "ERROR" + >>> points = PointDataArray([[0, 0, 0], [0, 1, 0], [1, 1, 1]], dims=["index", "axis"]) + >>> cells = CellDataArray([[0, 1, 2]], dims=["cell_index", "vertex_index"]) + >>> values = PointDataArray([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dims=["index", "axis"]) + >>> field_values = IndexedFieldDataArray(np.ones((3, 1, 3, 1)) + 0j, coords={"index": [0, 1, 2], "side": ["outside"],"axis": [0, 1, 2], "f": [1e10]}) + >>> field = TriangularSurfaceDataset(points=points, cells=cells, values=field_values) + >>> normal = TriangularSurfaceDataset(points=points, cells=cells, values=values) + >>> monitor = SurfaceFieldMonitor( + ... size=(2,4,6), freqs=[1e10], name='field', fields=['E', 'H'] + ... 
) + >>> data = SurfaceFieldData(monitor=monitor, E=field, H=field, normal=normal) + >>> td.config.logging_level = old_logging_level + """ + + monitor: SurfaceFieldMonitor = pd.Field( + ..., title="Monitor", description="Frequency-domain field monitor associated with the data." + ) + + @property + def poynting(self) -> TriangularSurfaceDataset: + """Time-averaged Poynting vector for frequency-domain data.""" + + if self.E is None or self.H is None: + raise ValueError( + "Could not calculate poynting: the dataset does not contain E or H field information." + ) + e_field = self.E + h_field = self.H + + poynting = e_field.updated_copy( + values=0.5 * np.real(xr.cross(e_field.values, np.conj(h_field.values), dim="axis")) + ) + + return poynting + + def normalize(self, source_spectrum_fn: Callable[[float], complex]) -> SurfaceFieldData: + """Return copy of self after normalization is applied using source spectrum function.""" + fields_norm = {} + src_amps = FreqDataArray( + source_spectrum_fn(self.monitor.freqs), coords={"f": list(self.monitor.freqs)} + ) + for field_name, field_data in self.field_components.items(): + fields_norm[field_name] = field_data.updated_copy( + values=(field_data.values / src_amps).astype(field_data.values.dtype) + ) + + return self.copy(update=fields_norm) + + +class SurfaceFieldTimeData(ElectromagneticSurfaceFieldData): + """ + + Example + ------- + >>> from tidy3d import PointDataArray, IndexedFieldDataArray, TriangularSurfaceDataset, CellDataArray + >>> import tidy3d as td + >>> old_logging_level = td.config.logging_level + >>> td.config.logging_level = "ERROR" + >>> points = PointDataArray([[0, 0, 0], [0, 1, 0], [1, 1, 1]], dims=["index", "axis"]) + >>> cells = CellDataArray([[0, 1, 2]], dims=["cell_index", "vertex_index"]) + >>> values = PointDataArray([[1, 0, 0], [0, 1, 0], [0, 0, 1]], dims=["index", "axis"]) + >>> field_values = IndexedFieldDataArray(np.ones((3, 1, 3, 1)) + 0j, coords={"index": [0, 1, 2], "side": ["outside"],"axis": [0, 1, 2], "t": [1e-9]}) + >>> field = TriangularSurfaceDataset(points=points, cells=cells, values=field_values) + >>> normal = TriangularSurfaceDataset(points=points, cells=cells, values=values) + >>> monitor = SurfaceFieldTimeMonitor( + ... size=(2,4,6), interval=100, name='field', fields=['E', 'H'] + ... ) + >>> data = SurfaceFieldTimeData(monitor=monitor, E=field, H=field, normal=normal) + >>> td.config.logging_level = old_logging_level + """ + + monitor: SurfaceFieldTimeMonitor = pd.Field( + ..., title="Monitor", description="Time-domain field monitor associated with the data." + ) + + @property + def poynting(self) -> TriangularSurfaceDataset: + """Poynting vector for time-domain data.""" + + if self.E is None or self.H is None: + raise ValueError( + "Could not calculate poynting: the dataset does not contain E or H field information." + ) + e_field = self.E + h_field = self.H + + poynting = e_field.updated_copy( + values=np.real(xr.cross(e_field.values, np.conj(h_field.values), dim="axis")) + ) + + return poynting + + class PermittivityData(PermittivityDataset, AbstractFieldData): """Data for a :class:`.PermittivityMonitor`: diagonal components of the permittivity tensor. 
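[Editorial note, not part of the patch: a minimal usage sketch of the surface-monitor API added in the hunks above. The monitor/simulation setup mirrors `test_surface_monitors`; the `web.run` workflow and the `"surface"` key lookup are assumed for illustration and are commented out.]

import tidy3d as td

# PEC structure whose surface the monitor will record fields on
pec_sphere = td.Structure(geometry=td.Sphere(radius=0.5), medium=td.PECMedium())

# frequency-domain surface field monitor introduced by this patch
surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface")

sim = td.Simulation(
    size=(2, 2, 2),
    structures=[pec_sphere],
    monitors=[surf_mnt],
    run_time=1e-12,
    grid_spec=td.GridSpec.auto(wavelength=1),
)

# After running the simulation, the monitor yields a SurfaceFieldData object whose
# E/H fields are TriangularSurfaceDataset instances. Derived quantities added in this
# patch include the time-averaged Poynting vector and the surface current density
# (computed from the jump in H across the surface and the outward normal).
# sim_data = td.web.run(sim, task_name="surface_monitor_example")  # assumed workflow
# surf_data = sim_data["surface"]
# S = surf_data.poynting           # TriangularSurfaceDataset
# J = surf_data.current_density    # TriangularSurfaceDataset
# S.plot(show=True)                # PyVista-based plotting added in this patch

[End of editorial note.]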
@@ -3959,6 +4153,8 @@ def fields_circular_polarization(self) -> xr.Dataset: FieldProjectionAngleData, DiffractionData, DirectivityData, + SurfaceFieldData, + SurfaceFieldTimeData, ) MonitorDataType = Union[MonitorDataTypes] diff --git a/tidy3d/components/data/unstructured/base.py b/tidy3d/components/data/unstructured/base.py index b5866fd2d6..2eb874b200 100644 --- a/tidy3d/components/data/unstructured/base.py +++ b/tidy3d/components/data/unstructured/base.py @@ -9,8 +9,9 @@ import numpy as np import pydantic.v1 as pd from xarray import DataArray as XrDataArray +from xarray import concat as xr_concat -from tidy3d.components.base import cached_property, skip_if_fields_missing +from tidy3d.components.base import Tidy3dBaseModel, cached_property, skip_if_fields_missing from tidy3d.components.data.data_array import ( DATA_ARRAY_MAP, CellDataArray, @@ -19,7 +20,6 @@ PointDataArray, SpatialDataArray, ) -from tidy3d.components.data.dataset import Dataset from tidy3d.components.types import ArrayLike, Axis, Bound from tidy3d.constants import inf from tidy3d.exceptions import DataError, Tidy3dNotImplementedError, ValidationError @@ -31,8 +31,8 @@ DEFAULT_TOLERANCE_CELL_FINDING = 1e-6 -class UnstructuredGridDataset(Dataset, np.lib.mixins.NDArrayOperatorsMixin, ABC): - """Abstract base for datasets that store unstructured grid data.""" +class UnstructuredDataset(Tidy3dBaseModel, np.lib.mixins.NDArrayOperatorsMixin, ABC): + """Abstract base for datasets that store unstructured grid or surface data.""" points: PointDataArray = pd.Field( ..., @@ -367,20 +367,16 @@ def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): # Only support operations with a scalar or an unstructured grid dataset of the same spatial dimensionality if not ( isinstance(x, numbers.Number) - or ( - isinstance(x, UnstructuredGridDataset) and x._point_dims() == self._point_dims() - ) + or (isinstance(x, type(self)) and x._point_dims() == self._point_dims()) ): raise Tidy3dNotImplementedError( f"Cannot perform arithmetic operations between instances of different classes ({type(self)} and {type(x)})." ) # Defer to the implementation of the ufunc on unwrapped values. 
- inputs = tuple(x.values if isinstance(x, UnstructuredGridDataset) else x for x in inputs) + inputs = tuple(x.values if isinstance(x, type(self)) else x for x in inputs) if out: - kwargs["out"] = tuple( - x.values if isinstance(x, UnstructuredGridDataset) else x for x in out - ) + kwargs["out"] = tuple(x.values if isinstance(x, type(self)) else x for x in out) result = getattr(ufunc, method)(*inputs, **kwargs) if type(result) is tuple: @@ -394,20 +390,28 @@ def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): return self.updated_copy(values=result) @property - def real(self) -> UnstructuredGridDataset: + def real(self) -> UnstructuredDataset: """Real part of dataset.""" return self.updated_copy(values=self.values.real) @property - def imag(self) -> UnstructuredGridDataset: + def imag(self) -> UnstructuredDataset: """Imaginary part of dataset.""" return self.updated_copy(values=self.values.imag) @property - def abs(self) -> UnstructuredGridDataset: + def abs(self) -> UnstructuredDataset: """Absolute value of dataset.""" return self.updated_copy(values=self.values.abs) + def conj(self) -> UnstructuredDataset: + """Complex conjugate value of dataset.""" + return self.updated_copy(values=self.values.conj()) + + def norm(self, dim) -> UnstructuredDataset: + """Compute vector norm along a given dimension.""" + return self.updated_copy(values=np.sqrt(self.values.dot(self.values.conj(), dim=dim).real)) + """ VTK interfacing """ @classmethod @@ -446,7 +450,7 @@ def _vtk_points(self): @property @requires_vtk - def _vtk_obj(self): + def _vtk_obj_empty(self): """A VTK representation (vtkUnstructuredGrid) of the grid.""" grid = vtk["mod"].vtkUnstructuredGrid() @@ -454,6 +458,15 @@ def _vtk_obj(self): grid.SetPoints(self._vtk_points) grid.SetCells(self._vtk_cell_type(), self._vtk_cells) + return grid + + @property + @requires_vtk + def _vtk_obj(self): + """A VTK representation (vtkUnstructuredGrid) of the grid.""" + + grid = self._vtk_obj_empty + if self.is_complex: # vtk doesn't support complex numbers # so we will store our complex array as a two-component vtk array @@ -495,7 +508,6 @@ def _read_vtkLegacyFile(fname: str): return grid @classmethod - @abstractmethod @requires_vtk def _from_vtk_obj( cls, @@ -504,10 +516,60 @@ def _from_vtk_obj( remove_degenerate_cells: bool = False, remove_unused_points: bool = False, values_type=IndexedDataArray, - expect_complex=None, - ignore_invalid_cells=False, - ) -> UnstructuredGridDataset: - """Initialize from a vtk object.""" + expect_complex: bool = False, + ignore_invalid_cells: bool = False, + ) -> UnstructuredDataset: + """Initialize from a vtkUnstructuredGrid instance.""" + + # read point, cells, and values info from a vtk instance + cells_numpy = vtk["vtk_to_numpy"](vtk_obj.GetCells().GetConnectivityArray()) + points_numpy = vtk["vtk_to_numpy"](vtk_obj.GetPoints().GetData()) + values = cls._get_values_from_vtk( + vtk_obj, len(points_numpy), field, values_type, expect_complex + ) + + # verify cell_types + cells_types = vtk["vtk_to_numpy"](vtk_obj.GetCellTypesArray()) + invalid_cells = cells_types != cls._vtk_cell_type() + if any(invalid_cells): + if ignore_invalid_cells: + cell_offsets = vtk["vtk_to_numpy"](vtk_obj.GetCells().GetOffsetsArray()) + valid_cell_offsets = cell_offsets[:-1][invalid_cells == 0] + cells_numpy = cells_numpy[ + np.ravel( + valid_cell_offsets[:, None] + + np.arange(cls._cell_num_vertices(), dtype=int)[None, :] + ) + ] + else: + raise DataError("Only tetrahedral 'vtkUnstructuredGrid' is currently supported") + + # pack point 
and cell information into Tidy3D arrays + num_cells = len(cells_numpy) // cls._cell_num_vertices() + cells_numpy = np.reshape(cells_numpy, (num_cells, cls._cell_num_vertices())) + + cells = CellDataArray( + cells_numpy, + coords={ + "cell_index": np.arange(num_cells), + "vertex_index": np.arange(cls._cell_num_vertices()), + }, + ) + + points = PointDataArray( + points_numpy, + coords={"index": np.arange(len(points_numpy)), "axis": np.arange(cls._point_dims())}, + ) + + if remove_degenerate_cells: + cells = cls._remove_degenerate_cells(cells=cells) + + if remove_unused_points: + points, values, cells = cls._remove_unused_points( + points=points, values=values, cells=cells + ) + + return cls(points=points, cells=cells, values=values) @requires_vtk def _from_vtk_obj_internal( @@ -515,7 +577,7 @@ def _from_vtk_obj_internal( vtk_obj, remove_degenerate_cells: bool = True, remove_unused_points: bool = True, - ) -> UnstructuredGridDataset: + ) -> UnstructuredDataset: """Initialize from a vtk object when performing internal operations. When we do that we pass structure of possibly multidimensional nature of values through parametes field and values_type. We also turn on by default cleaning of geometry.""" @@ -537,7 +599,7 @@ def from_vtu( remove_degenerate_cells: bool = False, remove_unused_points: bool = False, ignore_invalid_cells: bool = False, - ) -> UnstructuredGridDataset: + ) -> UnstructuredDataset: """Load unstructured data from a vtu file. Parameters @@ -555,8 +617,8 @@ def from_vtu( Returns ------- - UnstructuredGridDataset - Unstructured data. + UnstructuredDataset + Unstructured dataset. """ grid = cls._read_vtkUnstructuredGrid(file) return cls._from_vtk_obj( @@ -736,7 +798,7 @@ def get_cell_volumes(self): @requires_vtk def _plane_slice_raw(self, axis: Axis, pos: float): - """Slice data with a plane and return the resulting VTK object.""" + """Slice dataset with a plane and return the resulting VTK object.""" if pos > self.bounds[1][axis] or pos < self.bounds[0][axis]: raise DataError( @@ -774,9 +836,9 @@ def _plane_slice_raw(self, axis: Axis, pos: float): @abstractmethod @requires_vtk - def plane_slice(self, axis: Axis, pos: float) -> Union[XrDataArray, UnstructuredGridDataset]: - """Slice data with a plane and return the Tidy3D representation of the result - (``UnstructuredGridDataset``). + def plane_slice(self, axis: Axis, pos: float) -> Union[XrDataArray, UnstructuredDataset]: + """Slice dataset with a plane and return the Tidy3D representation of the result + (``UnstructuredDataset``). Parameters ---------- @@ -787,13 +849,13 @@ def plane_slice(self, axis: Axis, pos: float) -> Union[XrDataArray, Unstructured Returns ------- - Union[xarray.DataArray, UnstructuredGridDataset] + Union[xarray.DataArray, UnstructuredDataset] The resulting slice. """ @requires_vtk - def box_clip(self, bounds: Bound) -> UnstructuredGridDataset: - """Clip the unstructured grid using a box defined by ``bounds``. + def box_clip(self, bounds: Bound) -> UnstructuredDataset: + """Clip the unstructured dataset using a box defined by ``bounds``. Parameters ---------- @@ -802,8 +864,8 @@ def box_clip(self, bounds: Bound) -> UnstructuredGridDataset: Returns ------- - UnstructuredGridDataset - Clipped grid. + UnstructuredDataset + Clipped dataset. 
""" # make and run a VTK clipper @@ -831,13 +893,55 @@ def box_clip(self, bounds: Bound) -> UnstructuredGridDataset: return self._from_vtk_obj_internal(clean_clip) + @cached_property + @requires_vtk + def _boundary_points_indices(self): + """Find points that lie on open edges/faces.""" + + surface_filter = vtk["mod"].vtkDataSetSurfaceFilter() + surface_filter.SetInputData(self._vtk_obj_empty) + surface_filter.PassThroughPointIdsOn() # Important for getting original indices + surface_filter.Update() + + boundary_edges_vtk = surface_filter.GetOutput() + + if self._cell_num_vertices() == 3: + feature_edges = vtk["mod"].vtkFeatureEdges() + feature_edges.SetInputData(boundary_edges_vtk) + + # Enable the extraction of boundary edges + feature_edges.BoundaryEdgesOn() + + # Disable other types of edges to get only the boundary + feature_edges.FeatureEdgesOff() + feature_edges.ManifoldEdgesOff() + feature_edges.NonManifoldEdgesOff() + + feature_edges.Update() + boundary_edges_vtk = feature_edges.GetOutput() + + if boundary_edges_vtk.GetNumberOfCells() == 0: + # Mesh is watertight, no boundary + return np.array([], dtype=int) + + # Get the original point indices + original_ids_array = vtk["vtk_to_numpy"]( + boundary_edges_vtk.GetPointData().GetArray("vtkOriginalPointIds") + ) + boundary_point_indices = original_ids_array.copy() + return boundary_point_indices.astype(int) + @requires_vtk def reflect( - self, axis: Axis, center: float, reflection_only: bool = False - ) -> UnstructuredGridDataset: - """Reflect unstructured data across the plane define by parameters ``axis`` and ``center``. - By default the original data is preserved, setting ``reflection_only`` to ``True`` will - produce only deflected data. + self, + axis: Axis, + center: float, + reflection_only: bool = False, + symmetry: Union[Literal[-1, 1], XrDataArray] = 1, + ) -> UnstructuredDataset: + """Reflect unstructured dataset across the plane define by parameters ``axis`` and ``center``. + By default the original dataset is preserved, setting ``reflection_only`` to ``True`` will + produce only reflected dataset. Parameters ---------- @@ -846,26 +950,178 @@ def reflect( center : float Location of the reflection plane along its normal direction. reflection_only : bool = False - Return only reflected data. + Return only reflected dataset. + symmetry : Union[Literal[-1, 1], XrDataArray] = 1 + Symmetry of the reflected field. Returns ------- - UnstructuredGridDataset - Data after reflextion is performed. + UnstructuredDataset + Dataset after reflextion is performed. 
""" + # validate that if symmetry is an xarray, its dims are a subset of the values dims + # and coords values coincide along those dims + if isinstance(symmetry, XrDataArray): + value_dims = set(self.values.dims) - {"index"} + sym_dims = set(symmetry.dims) + if not sym_dims.issubset(value_dims): + raise DataError( + f"Symmetry xarray dimensions {sym_dims} must be a subset of values dimensions {value_dims}" + ) + # Check that coordinates match along shared dimensions + for dim in sym_dims: + if not np.array_equal(symmetry.coords[dim], self.values.coords[dim]): + raise DataError( + f"Coordinate values for dimension '{dim}' must match between symmetry and values" + ) + + if reflection_only: + reflected_points = self.points + reflected_points.loc[{"axis": axis}] = 2 * center - self.points.sel(axis=axis) + return self.updated_copy(points=reflected_points, values=self.values * symmetry) + + # record number of existing points + num_points = len(self.points) + + # detect points that are not on the reflection plane and on open edges + # Those will need to be duplicated + points_off_plane_map = np.ones(len(self.points), dtype=bool) + points_off_plane_map[self._boundary_points_indices] = ~np.isclose( + self.points.sel(axis=axis).data[self._boundary_points_indices], + center, + atol=1e-4, + rtol=1e-4, + ) + num_new_points = np.sum(points_off_plane_map) + + # create new points id + new_points_id = np.arange(num_new_points) + num_points + + new_points_id_map = -1 * np.ones_like(points_off_plane_map, dtype=int) + new_points_id_map[points_off_plane_map] = new_points_id + + new_points = self.points.sel(index=points_off_plane_map).copy() + new_points.loc[{"axis": axis}] = 2 * center - new_points.sel(axis=axis) + + # create new cells + new_cells = self.cells.copy() + new_points_in_new_cells_map = points_off_plane_map[new_cells] + new_points_in_new_cells_orig_id = new_cells.data[new_points_in_new_cells_map] + new_cells.data[new_points_in_new_cells_map] = new_points_id_map[ + new_points_in_new_cells_orig_id + ] + + # create new values + new_values = self.values.sel(index=points_off_plane_map).copy() * symmetry - reflector = vtk["mod"].vtkReflectionFilter() - reflector.SetPlane([reflector.USE_X, reflector.USE_Y, reflector.USE_Z][axis]) - reflector.SetCenter(center) - reflector.SetCopyInput(not reflection_only) - reflector.SetInputData(self._vtk_obj) - reflector.Update() + # combine with original data + combined_points = xr_concat([self.points, new_points], dim="index") + combined_values = xr_concat([self.values, new_values], dim="index") + combined_cells = xr_concat([self.cells, new_cells], dim="cell_index") - # since reflection does not really change geometries, let's not clean it - return self._from_vtk_obj_internal( - reflector.GetOutput(), remove_degenerate_cells=False, remove_unused_points=False + combined_points.coords["index"] = np.arange(len(combined_points)) + combined_values.coords["index"] = np.arange(len(combined_values)) + combined_cells.coords["cell_index"] = np.arange(len(combined_cells)) + + return self.updated_copy( + points=combined_points, cells=combined_cells, values=combined_values ) + """ Data selection """ + + @requires_vtk + def sel( + self, + x: Union[float, ArrayLike] = None, + y: Union[float, ArrayLike] = None, + z: Union[float, ArrayLike] = None, + method: Optional[Literal["None", "nearest", "pad", "ffill", "backfill", "bfill"]] = None, + **sel_kwargs, + ) -> Union[UnstructuredGridDataset, XrDataArray]: + """Extract/interpolate data along one or more spatial or non-spatial 
directions. Must provide at least one argument + among 'x', 'y', 'z' or non-spatial dimensions through additional arguments. Along spatial dimensions a suitable slicing of + grid is applied (plane slice, line slice, or interpolation). Selection along non-spatial dimensions is forwarded to + .sel() xarray function. Parameter 'method' applies only to non-spatial dimensions. + + Parameters + ---------- + x : Union[float, ArrayLike] = None + x-coordinate of the slice. + y : Union[float, ArrayLike] = None + y-coordinate of the slice. + z : Union[float, ArrayLike] = None + z-coordinate of the slice. + method: Literal[None, "nearest", "pad", "ffill", "backfill", "bfill"] = None + Method to use in xarray sel() function. + **sel_kwargs : dict + Keyword arguments to pass to the xarray sel() function. + + Returns + ------- + Union[TriangularGridDataset, xarray.DataArray] + Extracted data. + """ + + def _non_spatial_sel( + self, + method=None, + **sel_kwargs, + ) -> XrDataArray: + """Select/interpolate data along one or more non-Cartesian directions. + + Parameters + ---------- + **sel_kwargs : dict + Keyword arguments to pass to the xarray sel() function. + + Returns + ------- + xarray.DataArray + Extracted data. + """ + + if "index" in sel_kwargs.keys(): + raise DataError("Cannot select along dimension 'index'.") + + # convert individual values into lists of length 1 + # so that xarray doesn't drop the corresponding dimension + sel_kwargs_only_lists = { + key: value if isinstance(value, list) else [value] for key, value in sel_kwargs.items() + } + return self.updated_copy(values=self.values.sel(**sel_kwargs_only_lists, method=method)) + + def isel( + self, + **sel_kwargs, + ) -> XrDataArray: + """Select data along one or more non-Cartesian directions by coordinate index. + + Parameters + ---------- + **sel_kwargs : dict + Keyword arguments to pass to the xarray isel() function. + + Returns + ------- + xarray.DataArray + Extracted data. + """ + + if "index" in sel_kwargs.keys(): + raise DataError("Cannot select along dimension 'index'.") + + # convert individual values into lists of length 1 + # so that xarray doesn't drop the corresponding dimension + sel_kwargs_only_lists = { + key: value if isinstance(value, list) else [value] for key, value in sel_kwargs.items() + } + return self.updated_copy(values=self.values.isel(**sel_kwargs_only_lists)) + # return self.updated_copy(values=self.values.isel(**sel_kwargs)) + + +class UnstructuredGridDataset(UnstructuredDataset, np.lib.mixins.NDArrayOperatorsMixin, ABC): + """Abstract base for datasets that store unstructured grid data.""" + """ Interpolation """ def interp( @@ -1694,94 +1950,6 @@ def _interp_py_chunk( """ Data selection """ - @requires_vtk - def sel( - self, - x: Union[float, ArrayLike] = None, - y: Union[float, ArrayLike] = None, - z: Union[float, ArrayLike] = None, - method: Optional[Literal["None", "nearest", "pad", "ffill", "backfill", "bfill"]] = None, - **sel_kwargs, - ) -> Union[UnstructuredGridDataset, XrDataArray]: - """Extract/interpolate data along one or more spatial or non-spatial directions. Must provide at least one argument - among 'x', 'y', 'z' or non-spatial dimensions through additional arguments. Along spatial dimensions a suitable slicing of - grid is applied (plane slice, line slice, or interpolation). Selection along non-spatial dimensions is forwarded to - .sel() xarray function. Parameter 'method' applies only to non-spatial dimensions. 
- - Parameters - ---------- - x : Union[float, ArrayLike] = None - x-coordinate of the slice. - y : Union[float, ArrayLike] = None - y-coordinate of the slice. - z : Union[float, ArrayLike] = None - z-coordinate of the slice. - method: Literal[None, "nearest", "pad", "ffill", "backfill", "bfill"] = None - Method to use in xarray sel() function. - **sel_kwargs : dict - Keyword arguments to pass to the xarray sel() function. - - Returns - ------- - Union[TriangularGridDataset, xarray.DataArray] - Extracted data. - """ - - def _non_spatial_sel( - self, - method=None, - **sel_kwargs, - ) -> XrDataArray: - """Select/interpolate data along one or more non-Cartesian directions. - - Parameters - ---------- - **sel_kwargs : dict - Keyword arguments to pass to the xarray sel() function. - - Returns - ------- - xarray.DataArray - Extracted data. - """ - - if "index" in sel_kwargs.keys(): - raise DataError("Cannot select along dimension 'index'.") - - # convert individual values into lists of length 1 - # so that xarray doesn't drop the corresponding dimension - sel_kwargs_only_lists = { - key: value if isinstance(value, list) else [value] for key, value in sel_kwargs.items() - } - return self.updated_copy(values=self.values.sel(**sel_kwargs_only_lists, method=method)) - - def isel( - self, - **sel_kwargs, - ) -> XrDataArray: - """Select data along one or more non-Cartesian directions by coordinate index. - - Parameters - ---------- - **sel_kwargs : dict - Keyword arguments to pass to the xarray isel() function. - - Returns - ------- - xarray.DataArray - Extracted data. - """ - - if "index" in sel_kwargs.keys(): - raise DataError("Cannot select along dimension 'index'.") - - # convert individual values into lists of length 1 - # so that xarray doesn't drop the corresponding dimension - sel_kwargs_only_lists = { - key: value if isinstance(value, list) else [value] for key, value in sel_kwargs.items() - } - return self.updated_copy(values=self.values.isel(**sel_kwargs_only_lists)) - @requires_vtk def sel_inside(self, bounds: Bound) -> UnstructuredGridDataset: """Return a new UnstructuredGridDataset that contains the minimal amount data necessary to diff --git a/tidy3d/components/data/unstructured/surface.py b/tidy3d/components/data/unstructured/surface.py new file mode 100644 index 0000000000..7338d67567 --- /dev/null +++ b/tidy3d/components/data/unstructured/surface.py @@ -0,0 +1,463 @@ +"""Defines triangular grid datasets.""" + +from __future__ import annotations + +from typing import Literal, Optional, Union + +import numpy as np +import pydantic.v1 as pd +from xarray import DataArray as XrDataArray + +from tidy3d.components.base import cached_property +from tidy3d.components.data.data_array import ( + CellDataArray, + IndexedDataArrayTypes, + PointDataArray, +) +from tidy3d.components.types import ArrayLike, Axis +from tidy3d.components.viz import add_plotter_if_none +from tidy3d.exceptions import DataError, Tidy3dNotImplementedError +from tidy3d.packaging import pyvista, requires_pyvista, requires_vtk, vtk + +from .base import ( + UnstructuredDataset, +) + + +class TriangularSurfaceDataset(UnstructuredDataset): + """Dataset for storing triangulated surface data. Data values are associated with the nodes of + the mesh. + + Note + ---- + To use full functionality of unstructured datasets one must install ``vtk`` package (``pip + install tidy3d[vtk]`` or ``pip install vtk``). For visualization, ``pyvista`` is recommended + (``pip install pyvista``). 
Otherwise the functionality of unstructured + datasets is limited to creation, writing to/loading from a file, and arithmetic manipulations. + + Example + ------- + >>> import numpy as np + >>> from tidy3d.components.data.data_array import PointDataArray, CellDataArray, IndexedDataArray + >>> + >>> # Create a simple triangulated surface + >>> tri_grid_points = PointDataArray( + ... [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0]], + ... coords=dict(index=np.arange(4), axis=np.arange(3)), + ... ) + >>> + >>> tri_grid_cells = CellDataArray( + ... [[0, 1, 2], [1, 2, 3]], + ... coords=dict(cell_index=np.arange(2), vertex_index=np.arange(3)), + ... ) + >>> + >>> tri_grid_values = IndexedDataArray( + ... [1.0, 2.0, 3.0, 4.0], coords=dict(index=np.arange(4)), + ... ) + >>> + >>> tri_grid = TriangularSurfaceDataset( + ... points=tri_grid_points, + ... cells=tri_grid_cells, + ... values=tri_grid_values, + ... ) + >>> + >>> # Visualize the surface (change show=False to show=True to display the plot) + >>> _ = tri_grid.plot(show=False) + >>> + >>> # Customize the visualization (change show=False to show=True to display the plot) + >>> _ = tri_grid.plot(cmap='plasma', grid=True, grid_color='white', show=False) + >>> + >>> # For vector fields + >>> vector_values = IndexedDataArray( + ... [[1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 1, 0]], + ... coords=dict(index=np.arange(4), axis=np.arange(3)), + ... ) + >>> vector_grid = tri_grid.updated_copy(values=vector_values) + >>> + >>> # Plot as arrow field (change show=False to show=True to display the plot) + >>> _ = vector_grid.quiver(scale=0.2, show=False) + """ + + points: PointDataArray = pd.Field( + ..., + title="Surface Points", + description="Coordinates of points composing the triangulated surface.", + ) + + values: IndexedDataArrayTypes = pd.Field( + ..., + title="Surface Values", + description="Values stored at the surface points.", + ) + + cells: CellDataArray = pd.Field( + ..., + title="Surface Cells", + description="Cells composing the triangulated surface specified as connections between surface " + "points.", + ) + + """ Fundamental parameters to set up based on grid dimensionality """ + + @classmethod + def _point_dims(cls) -> pd.PositiveInt: + """Dimensionality of stored surface point coordinates.""" + return 3 + + @classmethod + def _cell_num_vertices(cls) -> pd.PositiveInt: + """Number of vertices in a cell.""" + return 3 + + """ Convenience properties """ + + @cached_property + def _points_3d_array(self) -> ArrayLike: + """3D representation of points.""" + return self.points.data + + """ VTK interfacing """ + + @classmethod + @requires_vtk + def _vtk_cell_type(cls): + """VTK cell type to use in the VTK representation.""" + return vtk["mod"].VTK_TRIANGLE + + """ Grid operations """ + + @requires_vtk + def plane_slice(self, axis: Axis, pos: float) -> XrDataArray: + """Slice data with a plane and return the resulting line as a DataArray. + + Parameters + ---------- + axis : Axis + The normal direction of the slicing plane. + pos : float + Position of the slicing plane along its normal direction. + + Returns + ------- + xarray.DataArray + The resulting slice. 
+ """ + + raise Tidy3dNotImplementedError("Slicing of unstructured surfaces is not implemented yet.") + + """ Data selection """ + + @requires_vtk + def sel( + self, + x: Union[float, ArrayLike] = None, + y: Union[float, ArrayLike] = None, + z: Union[float, ArrayLike] = None, + method: Optional[Literal["None", "nearest", "pad", "ffill", "backfill", "bfill"]] = None, + **sel_kwargs, + ) -> Union[TriangularSurfaceDataset, XrDataArray]: + """Extract/interpolate data along one or more spatial or non-spatial directions. + Currently works only for non-spatial dimensions through additional arguments. + Selection along non-spatial dimensions is forwarded to + .sel() xarray function. Parameter 'method' applies only to non-spatial dimensions. + + Parameters + ---------- + x : Union[float, ArrayLike] = None + x-coordinate of the slice. + y : Union[float, ArrayLike] = None + y-coordinate of the slice. + z : Union[float, ArrayLike] = None + z-coordinate of the slice. + method: Literal[None, "nearest", "pad", "ffill", "backfill", "bfill"] = None + Method to use in xarray sel() function. + **sel_kwargs : dict + Keyword arguments to pass to the xarray sel() function. + + Returns + ------- + Union[TriangularSurfaceDataset, xarray.DataArray] + Extracted data. + """ + + if any(comp is not None for comp in [x, y, z]): + raise Tidy3dNotImplementedError( + "Surface datasets do not support selection along x, y, or z yet." + ) + + return self._non_spatial_sel(method=method, **sel_kwargs) + + def get_cell_volumes(self): + """Get areas associated to each cell of the grid.""" + v0 = self.points[self.cells.sel(vertex_index=0)] + e01 = self.points[self.cells.sel(vertex_index=1)] - v0 + e02 = self.points[self.cells.sel(vertex_index=2)] - v0 + + return 0.5 * np.linalg.norm(np.cross(e01, e02), axis=1) + + """ Plotting """ + + @requires_pyvista + @add_plotter_if_none + def plot( + self, + plotter=None, + field: bool = True, + grid: bool = False, + cbar: bool = True, + cmap: str = "viridis", + vmin: Optional[float] = None, + vmax: Optional[float] = None, + grid_color: str = "black", + grid_width: float = 1.0, + opacity: float = 1.0, + show: bool = True, + windowed: Optional[bool] = None, + window_size: tuple = (800, 600), + **mesh_kwargs, + ): + """Plot the surface mesh and/or associated data using PyVista. + + Parameters + ---------- + plotter : pyvista.Plotter = None + PyVista plotter to add the mesh to. If not specified, one is created. + field : bool = True + Whether to plot the data field. + grid : bool = False + Whether to show the mesh edges/grid lines. + cbar : bool = True + Display scalar bar (only if ``field == True``). + cmap : str = "viridis" + Color map to use for plotting. + vmin : float = None + The lower bound of data range that the colormap covers. If ``None``, + inferred from the data. + vmax : float = None + The upper bound of data range that the colormap covers. If ``None``, + inferred from the data. + grid_color : str = "black" + Color of grid lines when ``grid == True``. + grid_width : float = 1.0 + Width of grid lines when ``grid == True``. + opacity : float = 1.0 + Opacity of the mesh (0=transparent, 1=opaque). + show : bool = True + Whether to display the plot immediately. If False, returns plotter for + further customization. + windowed : bool = None + Whether to display in an external window. If None (default), automatically + detects environment (inline in notebooks, windowed otherwise). 
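Going back to ``get_cell_volumes()`` above: for a surface the cell "volumes" are triangle areas obtained from the cross product of two edge vectors; a toy check of the formula:

    import numpy as np

    v0 = np.array([0.0, 0.0, 0.0])
    v1 = np.array([1.0, 0.0, 0.0])
    v2 = np.array([0.0, 1.0, 0.0])
    # 0.5 * |e01 x e02| -> 0.5 for a right triangle with unit legs
    area = 0.5 * np.linalg.norm(np.cross(v1 - v0, v2 - v0))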
Set to True + to force external window even when running in a notebook, which provides + better interactivity and performance. Only used when ``plotter`` is None. + window_size : tuple = (800, 600) + Size of the window (width, height) when creating new plotter. + **mesh_kwargs + Additional keyword arguments passed to plotter.add_mesh(). + + Returns + ------- + pyvista.Plotter + The plotter object with the mesh added. Returns None if ``show=True`` and + plotter was auto-created. + + Raises + ------ + DataError + If nothing to plot or if multiple fields present without selection. + """ + + if not (field or grid): + raise DataError("Nothing to plot ('field == False', 'grid == False').") + + # Validate field selection + if field and self._num_fields != 1: + raise DataError( + "Unstructured dataset contains more than 1 field. " + "Use '.sel()' to select a single field from available dimensions " + f"{self._values_coords_dict} before plotting." + ) + + # Check for complex values + if field and self.is_complex: + raise DataError( + "Cannot plot complex-valued data directly. " + "Please select a real component using '.real', '.imag', or '.abs' before plotting." + ) + + # Get PyVista module + pv = pyvista["mod"] + values_name = self.values.name if self.values.name else "values" + + # Create PyVista PolyData directly from points and cells + # Faces format: [3, v0, v1, v2, 3, v3, v4, v5, ...] where 3 is the number of vertices + faces = np.hstack([np.full((len(self.cells), 1), 3, dtype=int), self.cells.data]) + mesh = pv.PolyData(self.points.data, faces.ravel()) + + # Setup scalar field if needed + scalars = None + if field: + # Add scalar values to the mesh + data_values = self.values.values + if len(self._fields_shape) > 0: + data_values = data_values.reshape(len(self.points.values), self._num_fields) + mesh[values_name] = data_values + scalars = values_name + + # Add mesh to plotter + plotter.add_mesh( + mesh, + scalars=scalars if field else None, + cmap=cmap, + clim=(vmin, vmax) if (vmin is not None or vmax is not None) else None, + show_edges=grid, + edge_color=grid_color, + line_width=grid_width, + opacity=opacity, + show_scalar_bar=False, # We'll manually add scalar bar if cbar=True + **mesh_kwargs, + ) + + # Add scalar bar + if field and cbar: + scalar_bar_args = { + "title": self.values.name or "Value", + "n_labels": 5, + "italic": False, + "fmt": "%.2e", + "font_family": "arial", + } + plotter.add_scalar_bar(**scalar_bar_args) + + # Set axis labels + plotter.add_axes(xlabel="x", ylabel="y", zlabel="z") + + return plotter + + @requires_pyvista + @add_plotter_if_none + def quiver( + self, + plotter=None, + dim: str = "axis", + scale: float = 0.1, + downsampling: int = 1, + color: str = "magnitude", + cbar: bool = True, + cmap: str = "Spectral", + show: bool = True, + windowed: Optional[bool] = None, + window_size: tuple = (800, 600), + **arrow_kwargs, + ): + """Plot the associated data as vector field using arrows. + + Field ``values`` must have length 3 along the dimension representing x, y, and z components. + + Parameters + ---------- + plotter : pyvista.Plotter = None + PyVista plotter to add the arrows to. If not specified, one is created. + dim : str = "axis" + Dimension along which x, y, z components are stored. + scale : float = 0.1 + Size of arrows relative to the diagonal length of the surface bounding box. + downsampling : int = 1 + Step for selecting points for plotting (1 for plotting all points). + color : str = "magnitude" + How to color arrows. 
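The ``plot()`` implementation above packs the triangle connectivity into VTK's flat face layout before handing it to ``pyvista.PolyData``; a self-contained toy version of that step (requires ``pyvista``):

    import numpy as np
    import pyvista as pv

    points = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0]])
    cells = np.array([[0, 1, 2], [1, 2, 3]])

    # each face is prefixed by its vertex count: [3, v0, v1, v2, 3, v3, v4, v5, ...]
    faces = np.hstack([np.full((len(cells), 1), 3, dtype=int), cells]).ravel()
    mesh = pv.PolyData(points, faces)

    # one scalar per point, analogous to attaching ``values`` in plot()
    mesh["values"] = np.array([1.0, 2.0, 3.0, 4.0])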
If "magnitude", colors by vector magnitude using cmap. + Otherwise, should be a valid color string (e.g., 'red', 'blue', '#FF0000'). + cbar : bool = True + Display scalar bar (only if ``color == "magnitude"``). + cmap : str = "Spectral" + Color map to use when coloring by magnitude. + show : bool = True + Whether to display the plot immediately. + windowed : bool = None + Whether to display in an external window. If None (default), automatically + detects environment (inline in notebooks, windowed otherwise). Set to True + to force external window. Only used when ``plotter`` is None. + window_size : tuple = (800, 600) + Size of the window (width, height) when creating new plotter. + **arrow_kwargs + Additional keyword arguments passed to plotter.add_mesh() for the arrow glyphs. + + Returns + ------- + pyvista.Plotter + The plotter object with arrows added. Returns None if ``show=True`` and + plotter was auto-created. + + Raises + ------ + DataError + If dataset doesn't contain exactly 3 fields for vector components. + """ + + # Validate vector field + if self._num_fields != 3: + raise DataError( + "Unstructured dataset must contain exactly 3 fields for quiver plotting. " + "Use '.sel()' to select fields from available dimensions " + f"{self._values_coords_dict} before plotting." + ) + + # Extract downsampled points + points = self.points.data[::downsampling] + + # Extract vector components + u = self.values.sel(**{dim: 0}).real.data[::downsampling] + v = self.values.sel(**{dim: 1}).real.data[::downsampling] + w = self.values.sel(**{dim: 2}).real.data[::downsampling] + vectors = np.column_stack([u, v, w]) + + # Compute magnitude + mag = np.linalg.norm(vectors, axis=1) + mag_max = np.max(mag) + + # Compute scaling factor + bounds = np.array(self.bounds) + size = np.subtract(bounds[1], bounds[0]) + diag = np.linalg.norm(size) + scale_factor = scale * diag / mag_max if mag_max > 0 else scale * diag + + # Get PyVista module + pv = pyvista["mod"] + + # Create point cloud with scaled vectors + point_cloud = pv.PolyData(points) + point_cloud["vectors"] = vectors * scale_factor + + # Create arrow glyphs + arrows = point_cloud.glyph( + orient="vectors", + scale=False, # We already scaled the vectors + factor=1.0, + geom=pv.Arrow(), + ) + + # Add magnitude data for coloring + if color == "magnitude": + # Each point generates multiple vertices for the arrow geometry + # We need to repeat the magnitude for all vertices of each arrow + n_points_per_arrow = arrows.n_points // len(points) + arrows["magnitude"] = np.repeat(mag, n_points_per_arrow) + + plotter.add_mesh(arrows, scalars="magnitude", cmap=cmap, **arrow_kwargs) + + if cbar: + scalar_bar_args = { + "title": self.values.name or "Magnitude", + "n_labels": 5, + "italic": False, + "fmt": "%.2e", + "font_family": "arial", + } + plotter.add_scalar_bar(**scalar_bar_args) + else: + plotter.add_mesh(arrows, color=color, **arrow_kwargs) + + # Set axis labels + plotter.add_axes(xlabel="x", ylabel="y", zlabel="z") + + return plotter diff --git a/tidy3d/components/data/unstructured/tetrahedral.py b/tidy3d/components/data/unstructured/tetrahedral.py index 42ac4b58b1..f7f82a93f3 100644 --- a/tidy3d/components/data/unstructured/tetrahedral.py +++ b/tidy3d/components/data/unstructured/tetrahedral.py @@ -9,11 +9,6 @@ from xarray import DataArray as XrDataArray from tidy3d.components.base import cached_property -from tidy3d.components.data.data_array import ( - CellDataArray, - IndexedDataArray, - PointDataArray, -) from tidy3d.components.types import ArrayLike, 
Axis, Bound, Coordinate from tidy3d.exceptions import DataError from tidy3d.packaging import requires_vtk, vtk @@ -34,6 +29,9 @@ class TetrahedralGridDataset(UnstructuredGridDataset): Example ------- + >>> import numpy as np + >>> from tidy3d.components.data.data_array import PointDataArray, CellDataArray, IndexedDataArray + >>> >>> tet_grid_points = PointDataArray( ... [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], ... coords=dict(index=np.arange(4), axis=np.arange(3)), @@ -87,70 +85,6 @@ def _vtk_cell_type(cls): """VTK cell type to use in the VTK representation.""" return vtk["mod"].VTK_TETRA - @classmethod - @requires_vtk - def _from_vtk_obj( - cls, - vtk_obj, - field=None, - remove_degenerate_cells: bool = False, - remove_unused_points: bool = False, - values_type=IndexedDataArray, - expect_complex: bool = False, - ignore_invalid_cells: bool = False, - ) -> TetrahedralGridDataset: - """Initialize from a vtkUnstructuredGrid instance.""" - - # read point, cells, and values info from a vtk instance - cells_numpy = vtk["vtk_to_numpy"](vtk_obj.GetCells().GetConnectivityArray()) - points_numpy = vtk["vtk_to_numpy"](vtk_obj.GetPoints().GetData()) - values = cls._get_values_from_vtk( - vtk_obj, len(points_numpy), field, values_type, expect_complex - ) - - # verify cell_types - cells_types = vtk["vtk_to_numpy"](vtk_obj.GetCellTypesArray()) - invalid_cells = cells_types != cls._vtk_cell_type() - if any(invalid_cells): - if ignore_invalid_cells: - cell_offsets = vtk["vtk_to_numpy"](vtk_obj.GetCells().GetOffsetsArray()) - valid_cell_offsets = cell_offsets[:-1][invalid_cells == 0] - cells_numpy = cells_numpy[ - np.ravel( - valid_cell_offsets[:, None] - + np.arange(cls._cell_num_vertices(), dtype=int)[None, :] - ) - ] - else: - raise DataError("Only tetrahedral 'vtkUnstructuredGrid' is currently supported") - - # pack point and cell information into Tidy3D arrays - num_cells = len(cells_numpy) // cls._cell_num_vertices() - cells_numpy = np.reshape(cells_numpy, (num_cells, cls._cell_num_vertices())) - - cells = CellDataArray( - cells_numpy, - coords={ - "cell_index": np.arange(num_cells), - "vertex_index": np.arange(cls._cell_num_vertices()), - }, - ) - - points = PointDataArray( - points_numpy, - coords={"index": np.arange(len(points_numpy)), "axis": np.arange(cls._point_dims())}, - ) - - if remove_degenerate_cells: - cells = cls._remove_degenerate_cells(cells=cells) - - if remove_unused_points: - points, values, cells = cls._remove_unused_points( - points=points, values=values, cells=cells - ) - - return cls(points=points, cells=cells, values=values) - """ Grid operations """ @requires_vtk diff --git a/tidy3d/components/data/unstructured/triangular.py b/tidy3d/components/data/unstructured/triangular.py index 32d54f259b..b08b53c328 100644 --- a/tidy3d/components/data/unstructured/triangular.py +++ b/tidy3d/components/data/unstructured/triangular.py @@ -275,7 +275,7 @@ def plane_slice(self, axis: Axis, pos: float) -> XrDataArray: @requires_vtk def reflect( - self, axis: Axis, center: float, reflection_only: bool = False + self, axis: Axis, center: float, reflection_only: bool = False, symmetry: float = 1.0 ) -> UnstructuredGridDataset: """Reflect unstructured data across the plane define by parameters ``axis`` and ``center``. By default the original data is preserved, setting ``reflection_only`` to ``True`` will @@ -289,6 +289,8 @@ def reflect( Location of the reflection plane along its normal direction. reflection_only : bool = False Return only reflected data. 
+ symmetry : float = 1.0 + Symmetry factor to apply to the data. Returns ------- @@ -299,13 +301,21 @@ def reflect( # disallow reflecting along normal direction if axis == self.normal_axis: if reflection_only: - return self.updated_copy(normal_pos=2 * center - self.normal_pos) + return self.updated_copy( + values=self.values * symmetry, normal_pos=2 * center - self.normal_pos + ) else: raise DataError( "Reflection in the normal direction to the grid is prohibited unless 'reflection_only=True'." ) - return super().reflect(axis=axis, center=center, reflection_only=reflection_only) + tan_dims = [0, 1, 2] + tan_dims.remove(self.normal_axis) + tan_axis = tan_dims.index(axis) + + return super().reflect( + axis=tan_axis, center=center, reflection_only=reflection_only, symmetry=symmetry + ) """ Interpolation """ diff --git a/tidy3d/components/medium.py b/tidy3d/components/medium.py index e733df186c..3ff4a48d92 100644 --- a/tidy3d/components/medium.py +++ b/tidy3d/components/medium.py @@ -8103,3 +8103,20 @@ def medium_from_nk(n: float, k: float, freq: float, **kwargs) -> Union[Medium, L if eps_complex.real >= 1: return Medium.from_nk(n, k, freq, **kwargs) return Lorentz.from_nk(n, k, freq, **kwargs) + + +def _is_pec_like(medium: MediumType3D) -> bool: + """Check whether the medium is PEC or lossy metal. + + Parameters + ---------- + medium : MediumType3D + Medium to check + + Returns + ------- + bool + True if PEC or lossy metal. + """ + + return medium.is_pec or isinstance(medium, LossyMetalMedium) diff --git a/tidy3d/components/monitor.py b/tidy3d/components/monitor.py index 693c22623e..cfb1f2d159 100644 --- a/tidy3d/components/monitor.py +++ b/tidy3d/components/monitor.py @@ -27,6 +27,7 @@ Coordinate, Direction, EMField, + EMSurfaceField, FreqArray, FreqBound, ObsGridArray, @@ -1576,6 +1577,182 @@ def _storage_size_solver(self, num_cells: int, tmesh: ArrayFloat1D) -> int: return BYTES_COMPLEX * num_cells * len(self.freqs) * 6 +class AbstractSurfaceMonitor(Monitor, ABC): + """:class:`Monitor` that records electromagnetic field data as a function of x,y,z on PEC surfaces.""" + + fields: tuple[EMSurfaceField, ...] = pydantic.Field( + ["E", "H"], + title="Field Components", + description="Collection of field components to store in the monitor.", + ) + + interval_space: tuple[Literal[1], Literal[1], Literal[1]] = pydantic.Field( + (1, 1, 1), + title="Spatial Interval", + description="Number of grid step intervals between monitor recordings. If equal to 1, " + "there will be no downsampling. If greater than 1, the step will be applied, but the " + "first and last point of the monitor grid are always included.", + ) + + colocate: Literal[False] = pydantic.Field( + False, + title="Colocate Fields", + description="For surface monitors fields are always colocated on surface.", + ) + + @pydantic.validator("fields", always=True) + def _warn_beta_stage(cls, val, values): + """Warn that surface monitors are in beta stage.""" + + log.warning( + "Surface monitors are currently in beta stage. Please exercise caution when analyzing " + "surface monitor data and verify results carefully. If you encounter any issues, " + "please report them to our support team.", + log_once=True, + ) + return val + + +class SurfaceFieldMonitor(AbstractSurfaceMonitor, FreqMonitor): + """:class:`Monitor` that records electromagnetic fields in the frequency domain on PEC surfaces. 
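Surface monitors only record on PEC-like surfaces, and the ``_is_pec_like()`` helper added in ``medium.py`` above (true for PEC and ``LossyMetalMedium``) is what the simulation-level validation relies on; a quick sketch of its behavior:

    import tidy3d as td
    from tidy3d.components.medium import _is_pec_like

    print(_is_pec_like(td.PEC))                       # True: perfect electric conductor
    print(_is_pec_like(td.Medium(permittivity=2.0)))  # False: ordinary dielectric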
+ + Notes + ----- + + :class:`SurfaceFieldMonitor` objects operate by running a discrete Fourier transform of the fields at a given set of + frequencies to perform the calculation "in-place" with the time stepping. These monitors are specifically designed + to record fields on PEC (perfect electric conductor) surfaces, storing the normal E and tangential H fields. + + Example + ------- + >>> import tidy3d as td + >>> old_logging_level = td.config.logging_level + >>> td.config.logging_level = "ERROR" + >>> monitor = SurfaceFieldMonitor( + ... center=(1,2,3), + ... size=(2,2,2), + ... fields=['E', 'H'], + ... freqs=[250e12, 300e12], + ... name='surface_monitor') + >>> td.config.logging_level = old_logging_level + """ + + def storage_size(self, num_cells: int, tmesh: ArrayFloat1D) -> int: + """Size of monitor storage given the number of points after discretization. + In general, this is severely overestimated for surface monitors. + """ + + # estimation based on triangulated surface when it crosses cells in xy plane + num_tris = num_cells * 6 + num_points = num_cells * 4 + + # storing 3 coordinate components per point + storage = 3 * BYTES_REAL * num_points + + # storing 3 indices per triangle + storage += 3 * BYTES_REAL * num_tris + + # EH field values + normal field + storage += ( + BYTES_COMPLEX * num_points * len(self.freqs) * len(self.fields) * 3 + + 3 * num_points * BYTES_REAL + ) + + return storage + + def _storage_size_solver(self, num_cells: int, tmesh: ArrayFloat1D) -> int: + """Size of intermediate data recorded by the monitor during a solver run.""" + + # fields + storage = BYTES_COMPLEX * num_cells * len(self.freqs) * len(self.fields) * 3 + + # fields valid map + storage += BYTES_REAL * num_cells * len(self.freqs) * len(self.fields) * 3 + + # auxiliary variables (normals and locations) + storage += BYTES_REAL * num_cells * 7 * 4 + + return storage + + +class SurfaceFieldTimeMonitor(AbstractSurfaceMonitor, TimeMonitor): + """:class:`Monitor` that records electromagnetic fields in the time domain on PEC surfaces. + + Notes + ----- + + :class:`SurfaceFieldTimeMonitor` objects are best used to monitor the time dependence of the fields + on a PEC surface. They can also be used to create “animations” of the field pattern evolution. + + To create an animation, we need to capture the frames at different time instances of the simulation. This can + be done by using a :class:`SurfaceFieldTimeMonitor`. Usually a FDTD simulation contains a large number of time steps + and grid points. Recording the field at every time step and grid point will result in a large dataset. For + the purpose of making animations, this is usually unnecessary. + + + Example + ------- + >>> import tidy3d as td + >>> old_logging_level = td.config.logging_level + >>> td.config.logging_level = "ERROR" + >>> monitor = SurfaceFieldTimeMonitor( + ... center=(1,2,3), + ... size=(2,2,2), + ... fields=['H'], + ... start=1e-13, + ... stop=5e-13, + ... interval=2, + ... name='movie_monitor') + >>> td.config.logging_level = old_logging_level + """ + + def storage_size(self, num_cells: int, tmesh: ArrayFloat1D) -> int: + """Size of monitor storage given the number of points after discretization. + In general, this is severely overestimated for surface monitors. 
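To make the frequency-domain estimate in ``SurfaceFieldMonitor.storage_size()`` above concrete, a toy calculation (assuming the 4-byte real / 8-byte complex sizes used for ``BYTES_REAL`` and ``BYTES_COMPLEX``; the cell count is arbitrary):

    BYTES_REAL, BYTES_COMPLEX = 4, 8                 # assumed single-precision sizes
    num_cells, num_freqs, num_fields = 1000, 2, 2    # e.g. fields=['E', 'H'] at two frequencies

    num_tris = num_cells * 6
    num_points = num_cells * 4

    storage = 3 * BYTES_REAL * num_points                               # point coordinates
    storage += 3 * BYTES_REAL * num_tris                                # triangle connectivity
    storage += BYTES_COMPLEX * num_points * num_freqs * num_fields * 3  # E/H field components
    storage += 3 * num_points * BYTES_REAL                              # surface normals
    print(f"{storage / 1e6:.3f} MB")                                    # 0.552 MB for this toy case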
+ """ + num_steps = self.num_steps(tmesh) + + # estimation based on triangulated surface when it crosses cells in xy plane + num_tris = num_cells * 6 + num_points = num_cells * 4 + + # storing 3 coordinate components per point + storage = 3 * BYTES_REAL * num_points + + # storing 3 indices per triangle + storage += 3 * BYTES_REAL * num_tris + + # EH field values + normal field + storage += ( + BYTES_COMPLEX * num_points * num_steps * len(self.fields) * 3 + + 3 * num_points * BYTES_REAL + ) + + return storage + + def _storage_size_solver(self, num_cells: int, tmesh: ArrayFloat1D) -> int: + """Size of intermediate data recorded by the monitor during a solver run.""" + + num_steps = self.num_steps(tmesh) + + # fields + storage = BYTES_COMPLEX * num_cells * num_steps * len(self.fields) * 3 + + # fields valid map + storage += BYTES_REAL * num_cells * num_steps * len(self.fields) * 3 + + # auxiliary variables (normals and locations) + storage += BYTES_REAL * num_cells * 7 * 4 + + return storage + + +SurfaceMonitorType = Union[ + SurfaceFieldMonitor, + SurfaceFieldTimeMonitor, +] + + # types of monitors that are accepted by simulation MonitorType = Union[ FieldMonitor, @@ -1592,4 +1769,6 @@ def _storage_size_solver(self, num_cells: int, tmesh: ArrayFloat1D) -> int: FieldProjectionKSpaceMonitor, DiffractionMonitor, DirectivityMonitor, + SurfaceFieldMonitor, + SurfaceFieldTimeMonitor, ] diff --git a/tidy3d/components/simulation.py b/tidy3d/components/simulation.py index 4dbde67f38..4223e21291 100644 --- a/tidy3d/components/simulation.py +++ b/tidy3d/components/simulation.py @@ -69,6 +69,7 @@ MediumType, MediumType3D, PECMedium, + _is_pec_like, ) from .monitor import ( AbstractFieldProjectionMonitor, @@ -89,6 +90,7 @@ MonitorType, PermittivityMonitor, SurfaceIntegrationMonitor, + SurfaceMonitorType, TimeMonitor, ) from .run_time_spec import RunTimeSpec @@ -115,6 +117,8 @@ ArrayFloat2D, Ax, Axis, + Bound, + Coordinate, CoordinateOptional, FreqBound, InterpMethod, @@ -3928,6 +3932,68 @@ def diffraction_and_directivity_monitor_medium(cls, val, values): raise SetupError(f"'{monitor.type}' must not lie in a lossy medium.") return val + @classmethod + def _get_surface_monitor_bounds( + cls, + center: Coordinate, + size: Coordinate, + monitor: SurfaceMonitorType, + medium: MediumType3D, + structures: list[Structure], + ) -> list[Bound]: + """Intersect a surface monitor with the bounding box of each PEC structure.""" + + sim_box = Box(center=center, size=size) + mnt_bounds = Box.bounds_intersection(monitor.bounds, sim_box.bounds) + + if _is_pec_like(medium): + return [mnt_bounds] + + bounds = [] + for structure in structures: + if _is_pec_like(structure.medium): + intersection_bounds = Box.bounds_intersection(mnt_bounds, structure.geometry.bounds) + if all(bmin <= bmax for bmin, bmax in zip(*intersection_bounds)): + bounds.append(intersection_bounds) + + return bounds + + def _get_surface_monitor_bounds_self(self, monitor: SurfaceMonitorType) -> list[Bound]: + """Intersect a surface monitor with the bounding box of each PEC structure.""" + + sim_box = Box(center=self.center, size=self.size) + mnt_bounds = Box.bounds_intersection(monitor.bounds, sim_box.bounds) + + if _is_pec_like(self.medium): + return [mnt_bounds] + + bounds = [] + for structure in self.structures: + if _is_pec_like(structure.medium): + intersection_bounds = Box.bounds_intersection(mnt_bounds, structure.geometry.bounds) + if all(bmin <= bmax for bmin, bmax in zip(*intersection_bounds)): + bounds.append(intersection_bounds) + + return 
bounds + + @pydantic.validator("monitors", always=True) + @skip_if_fields_missing(["medium", "structures", "size", "medium"]) + def error_empty_surface_monitor(cls, val, values): + """Error if any surface monitor does not at least cross a bounding box of a PEC/LossyMetal structure.""" + monitors = val + center = values.get("center") + size = values.get("size") + structures = values.get("structures") + medium = values.get("medium") + for mnt in monitors: + if isinstance(mnt, SurfaceMonitorType): + bounds = cls._get_surface_monitor_bounds(center, size, mnt, medium, structures) + if len(bounds) == 0: + raise SetupError( + f"Surface monitor {mnt.name} does not cross any PEC of LossyMetalMedium structures." + ) + return val + @pydantic.validator("grid_spec", always=True) @skip_if_fields_missing(["medium", "sources", "structures"]) def _warn_grid_size_too_small(cls, val, values): diff --git a/tidy3d/components/tcad/data/monitor_data/abstract.py b/tidy3d/components/tcad/data/monitor_data/abstract.py index 147141974d..687347b216 100644 --- a/tidy3d/components/tcad/data/monitor_data/abstract.py +++ b/tidy3d/components/tcad/data/monitor_data/abstract.py @@ -2,14 +2,12 @@ from __future__ import annotations -import copy from abc import ABC, abstractmethod from typing import Union -import numpy as np import pydantic.v1 as pd -from tidy3d.components.base_sim.data.monitor_data import AbstractMonitorData +from tidy3d.components.base_sim.data.monitor_data import AbstractUnstructuredMonitorData from tidy3d.components.data.data_array import ( SpatialDataArray, ) @@ -17,8 +15,7 @@ from tidy3d.components.tcad.types import ( HeatChargeMonitorType, ) -from tidy3d.components.types import Coordinate, ScalarSymmetry, annotate_type -from tidy3d.constants import MICROMETER +from tidy3d.components.types import ScalarSymmetry, annotate_type from tidy3d.log import log FieldDataset = Union[ @@ -27,7 +24,7 @@ UnstructuredFieldType = Union[TriangularGridDataset, TetrahedralGridDataset] -class HeatChargeMonitorData(AbstractMonitorData, ABC): +class HeatChargeMonitorData(AbstractUnstructuredMonitorData, ABC): """Abstract base class of objects that store data pertaining to a single :class:`HeatChargeMonitor`.""" monitor: HeatChargeMonitorType = pd.Field( @@ -42,13 +39,6 @@ class HeatChargeMonitorData(AbstractMonitorData, ABC): description="Symmetry of the original simulation in x, y, and z.", ) - symmetry_center: Coordinate = pd.Field( - (0, 0, 0), - title="Symmetry Center", - description="Symmetry center of the original simulation in x, y, and z.", - units=MICROMETER, - ) - @abstractmethod def field_components(self) -> dict: """Maps the field components to their associated data.""" @@ -74,70 +64,6 @@ def symmetry_expanded_copy(self) -> HeatChargeMonitorData: return self.updated_copy(symmetry=(0, 0, 0), **new_field_components) - def _symmetry_expanded_copy_base(self, property: FieldDataset) -> FieldDataset: - """Return the property with symmetry applied.""" - - # no symmetry - if all(sym == 0 for sym in self.symmetry): - return property - - new_property = copy.copy(property) - - mnt_bounds = np.array(self.monitor.bounds) - - if isinstance(new_property, SpatialDataArray): - data_bounds = [ - [np.min(new_property.x), np.min(new_property.y), np.min(new_property.z)], - [np.max(new_property.x), np.max(new_property.y), np.max(new_property.z)], - ] - else: - data_bounds = new_property.bounds - - dims_need_clipping_left = [] - dims_need_clipping_right = [] - for dim in range(3): - # do not expand monitor with zero size along symmetry 
direction - # this is done because 2d unstructured data does not support this - if self.symmetry[dim] == 1: - center = self.symmetry_center[dim] - - if mnt_bounds[1][dim] < data_bounds[0][dim]: - # (note that mnt_bounds[0][dim] < 2 * center - data_bounds[0][dim] will be satisfied based on backend behavior) - # simple reflection - new_property = new_property.reflect( - axis=dim, center=center, reflection_only=True - ) - elif mnt_bounds[0][dim] < 2 * center - data_bounds[0][dim]: - # expand only if monitor bounds missing data - # if we do expand, simply reflect symmetrically the whole data - new_property = new_property.reflect(axis=dim, center=center) - - # if it turns out that we expanded too much, we will trim unnecessary data later - if mnt_bounds[0][dim] > 2 * center - data_bounds[1][dim]: - dims_need_clipping_left.append(dim) - - # likewise, if some of original data was only for symmetry expansion, thim excess on the right - if mnt_bounds[1][dim] < data_bounds[1][dim]: - dims_need_clipping_right.append(dim) - - # trim over-expanded data - if len(dims_need_clipping_left) > 0 or len(dims_need_clipping_right) > 0: - # enlarge clipping domain on positive side arbitrary by 1 - # should not matter by how much - clip_bounds = [mnt_bounds[0] - 1, mnt_bounds[1] + 1] - for dim in dims_need_clipping_left: - clip_bounds[0][dim] = mnt_bounds[0][dim] - - for dim in dims_need_clipping_right: - clip_bounds[1][dim] = mnt_bounds[1][dim] - - if isinstance(new_property, SpatialDataArray): - new_property = new_property.sel_inside(clip_bounds) - else: - new_property = new_property.box_clip(bounds=clip_bounds) - - return new_property - def _post_init_validators(self): """Call validators taking ``self`` that get run after init.""" # validate that data exists for all fields diff --git a/tidy3d/components/types/__init__.py b/tidy3d/components/types/__init__.py index 90dd0dc84d..b24f1bb248 100644 --- a/tidy3d/components/types/__init__.py +++ b/tidy3d/components/types/__init__.py @@ -29,6 +29,7 @@ CoordinateOptional, Direction, EMField, + EMSurfaceField, EpsSpecType, FieldType, FieldVal, @@ -97,6 +98,7 @@ "CoordinateOptional", "Direction", "EMField", + "EMSurfaceField", "EpsSpecType", "FieldType", "FieldVal", diff --git a/tidy3d/components/types/base.py b/tidy3d/components/types/base.py index 60c8c8ea03..5e2f02d45d 100644 --- a/tidy3d/components/types/base.py +++ b/tidy3d/components/types/base.py @@ -226,6 +226,7 @@ def __modify_schema__(cls, field_schema): """ monitors """ EMField = Literal["Ex", "Ey", "Ez", "Hx", "Hy", "Hz"] +EMSurfaceField = Literal["E", "H"] FieldType = Literal["Ex", "Ey", "Ez", "Hx", "Hy", "Hz"] FreqArray = Union[tuple[float, ...], ArrayFloat1D] ObsGridArray = Union[tuple[float, ...], ArrayFloat1D] diff --git a/tidy3d/components/viz/__init__.py b/tidy3d/components/viz/__init__.py index bbc533144f..43a2872b68 100644 --- a/tidy3d/components/viz/__init__.py +++ b/tidy3d/components/viz/__init__.py @@ -1,6 +1,13 @@ from __future__ import annotations -from .axes_utils import add_ax_if_none, equal_aspect, make_ax, set_default_labels_and_title +from .axes_utils import ( + add_ax_3d_if_none, + add_ax_if_none, + add_plotter_if_none, + equal_aspect, + make_ax, + set_default_labels_and_title, +) from .descartes import Polygon, polygon_patch, polygon_path from .flex_style import apply_tidy3d_params, restore_matplotlib_rcparams from .plot_params import ( @@ -62,7 +69,9 @@ "PlotParams", "Polygon", "VisualizationSpec", + "add_ax_3d_if_none", "add_ax_if_none", + "add_plotter_if_none", "arrow_style", 
"equal_aspect", "make_ax", diff --git a/tidy3d/components/viz/axes_utils.py b/tidy3d/components/viz/axes_utils.py index 85007e773c..2c306e229b 100644 --- a/tidy3d/components/viz/axes_utils.py +++ b/tidy3d/components/viz/axes_utils.py @@ -6,6 +6,7 @@ from tidy3d.components.types import Ax, Axis, LengthUnit from tidy3d.constants import UnitScaling from tidy3d.exceptions import Tidy3dKeyError +from tidy3d.packaging import pyvista def _create_unit_aware_locator(): @@ -105,6 +106,14 @@ def make_ax() -> Ax: return ax +def make_ax_3d() -> Ax: + """makes an empty ``ax`` with 3d projection.""" + import matplotlib.pyplot as plt + + _, ax = plt.subplots(1, 1, tight_layout=True, subplot_kw={"projection": "3d"}) + return ax + + def add_ax_if_none(plot): """Decorates ``plot(*args, **kwargs, ax=None)`` function. if ax=None in the function call, creates an ax and feeds it to rest of function. @@ -121,6 +130,22 @@ def _plot(*args, **kwargs) -> Ax: return _plot +def add_ax_3d_if_none(plot): + """Decorates ``plot(*args, **kwargs, ax=None)`` function. + if ax=None in the function call, creates an ax with 3d projection and feeds it to rest of function. + """ + + @wraps(plot) + def _plot(*args, **kwargs) -> Ax: + """New plot function using a generated ax if None.""" + if kwargs.get("ax") is None: + ax = make_ax_3d() + kwargs["ax"] = ax + return plot(*args, **kwargs) + + return _plot + + def equal_aspect(plot): """Decorates a plotting function returning a matplotlib axes. Ensures the aspect ratio of the returned axes is set to equal. @@ -137,6 +162,82 @@ def _plot(*args, **kwargs) -> Ax: return _plot +def _is_notebook() -> bool: + """Detect if running in Jupyter notebook. + + Returns + ------- + bool + True if running in Jupyter notebook, False otherwise. + """ + try: + from IPython import get_ipython + + ipython = get_ipython() + if ipython is None: + return False + return "IPKernelApp" in ipython.config + except (ImportError, AttributeError): + return False + + +def add_plotter_if_none(plot): + """Decorates ``plot(*args, **kwargs, plotter=None)`` function for PyVista. + If plotter=None in the function call, creates a plotter and feeds it to rest of function. + + The wrapped function should accept 'plotter' as first argument after self. + The wrapped function should return the plotter object. + + This decorator will: + - Auto-detect notebook environment (or use windowed parameter) + - Create plotter if None + - Call plotter.show() at the end if show=True and plotter was created here + - Return the plotter object or show() result + + Parameters handled by decorator: + - plotter: If None, creates new plotter + - show: If True and plotter was created, calls plotter.show() + - windowed: If None, auto-detects. If True, forces external window. 
+ - window_size: Tuple for window dimensions (only when creating plotter) + """ + + @wraps(plot) + def _plot(*args, **kwargs): + """New plot function using a generated plotter if None.""" + # Extract decorator-specific parameters + plotter = kwargs.get("plotter") + show = kwargs.pop("show", True) + windowed = kwargs.pop("windowed", None) + window_size = kwargs.pop("window_size", (800, 600)) + + # Determine display mode + if windowed is None: + # Auto-detect: windowed=False (inline) in notebooks, True otherwise + windowed = not _is_notebook() + + # Track if we created the plotter + plotter_created = plotter is None + + # Create plotter if not provided + if plotter is None: + pv = pyvista["mod"] + # PyVista uses 'notebook' parameter (True=inline, False=window) + # Our 'windowed' is opposite: True=window, False=inline + plotter = pv.Plotter(notebook=not windowed, window_size=window_size) + kwargs["plotter"] = plotter + + # Call the wrapped function + plotter = plot(*args, **kwargs) + + # Show if we created the plotter and show=True + if plotter_created and show: + return plotter.show() + + return plotter + + return _plot + + def set_default_labels_and_title( axis_labels: tuple[str, str], axis: Axis, diff --git a/tidy3d/packaging.py b/tidy3d/packaging.py index b0b7ede808..eeed6f6580 100644 --- a/tidy3d/packaging.py +++ b/tidy3d/packaging.py @@ -25,6 +25,8 @@ "numpy_to_vtk": None, } +pyvista = {"mod": None} + tidy3d_extras = {"mod": None, "use_local_subpixel": None} @@ -159,6 +161,29 @@ def _fn(*args, **kwargs): return _fn +def requires_pyvista(fn): + """When decorating a method, requires that pyvista is available.""" + + @functools.wraps(fn) + def _fn(*args, **kwargs): + if pyvista["mod"] is None: + try: + import pyvista as pv + + pyvista["mod"] = pv + + except ImportError as exc: + raise Tidy3dImportError( + "The package 'pyvista' is required for this operation, but it was not found. " + "Please install the 'pyvista' dependencies using, for example, " + "'pip install pyvista' or 'pip install tidy3d[pyvista]'." + ) from exc + + return fn(*args, **kwargs) + + return _fn + + def get_numpy_major_version(module=np): """ Extracts the major version of the installed numpy accordingly. 
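Taken together, ``requires_pyvista`` and ``add_plotter_if_none`` are meant to wrap plotting methods in the pattern below (a minimal sketch; the class name is hypothetical):

    from tidy3d.components.viz import add_plotter_if_none
    from tidy3d.packaging import requires_pyvista


    class MySurfaceData:
        @requires_pyvista     # lazily imports pyvista, raising Tidy3dImportError if missing
        @add_plotter_if_none  # creates a pyvista.Plotter and handles show/windowed/window_size
        def plot(self, plotter=None, **mesh_kwargs):
            import pyvista as pv

            # the wrapped method only draws onto the plotter it receives and returns it
            plotter.add_mesh(pv.Sphere(), **mesh_kwargs)
            return plotter


    # calling with show=False skips plotter.show() and returns the plotter for customization
    # p = MySurfaceData().plot(show=False)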
From 0c36d6d954e63fcdd74267607d3b541f40224b32 Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Tue, 14 Oct 2025 15:34:11 -0700 Subject: [PATCH 2/7] greptile comments --- tidy3d/components/base_sim/data/monitor_data.py | 2 +- tidy3d/components/data/data_array.py | 10 +++++----- tidy3d/components/data/monitor_data.py | 2 +- tidy3d/components/data/unstructured/base.py | 6 +++--- tidy3d/components/data/unstructured/triangular.py | 2 +- tidy3d/components/simulation.py | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tidy3d/components/base_sim/data/monitor_data.py b/tidy3d/components/base_sim/data/monitor_data.py index fd6453c04e..3b87dc3d8f 100644 --- a/tidy3d/components/base_sim/data/monitor_data.py +++ b/tidy3d/components/base_sim/data/monitor_data.py @@ -100,7 +100,7 @@ def _symmetry_expanded_copy_base( if mnt_bounds[0][dim] > 2 * center - data_bounds[1][dim]: dims_need_clipping_left.append(dim) - # likewise, if some of original data was only for symmetry expansion, thim excess on the right + # likewise, if some of original data was only for symmetry expansion, trim excess on the right if mnt_bounds[1][dim] < data_bounds[1][dim]: dims_need_clipping_right.append(dim) diff --git a/tidy3d/components/data/data_array.py b/tidy3d/components/data/data_array.py index df1226eeb8..598418d875 100644 --- a/tidy3d/components/data/data_array.py +++ b/tidy3d/components/data/data_array.py @@ -1544,7 +1544,7 @@ class IndexedSurfaceTimeDataArray(DataArray): Example ------- >>> surface_side_array = IndexedSurfaceTimeDataArray( - ... (1+1j) * np.random.random((4,2,1)), coords=dict(index=np.arange(4), side=["outside", "inside"], f=[1e9]) + ... (1+1j) * np.random.random((4,2,1)), coords=dict(index=np.arange(4), side=["outside", "inside"], t=[1e9]) ... ) """ @@ -1559,7 +1559,7 @@ class IndexedFieldDataArray(DataArray): Example ------- >>> indexed_array = IndexedFieldDataArray( - ... (1+1j) * np.random.random((4,3,1)), coords=dict(index=np.arange(4), axis=np.arange(3), f=[1e9]) + ... (1+1j) * np.random.random((4,2,3,1)), coords=dict(index=np.arange(4), side=["outside", "inside"], axis=np.arange(3), f=[1e9]) ... ) """ @@ -1573,8 +1573,8 @@ class IndexedFieldTimeDataArray(DataArray): Example ------- - >>> indexed_array = IndexedFieldDataArray( - ... (1+1j) * np.random.random((4,3,1)), coords=dict(index=np.arange(4), axis=np.arange(3), t=[0]) + >>> indexed_array = IndexedFieldTimeDataArray( + ... (1+1j) * np.random.random((4,2,3,1)), coords=dict(index=np.arange(4), side=["outside", "inside"], axis=np.arange(3), t=[0]) ... ) """ @@ -1588,7 +1588,7 @@ class IndexedFreqDataArray(DataArray): Example ------- - >>> indexed_array = IndexedFieldDataArray( + >>> indexed_array = IndexedFreqDataArray( ... (1+1j) * np.random.random((4,1)), coords=dict(index=np.arange(4), f=[1e9]) ... 
) """ diff --git a/tidy3d/components/data/monitor_data.py b/tidy3d/components/data/monitor_data.py index 47a4d9a7c0..1b3b62777f 100644 --- a/tidy3d/components/data/monitor_data.py +++ b/tidy3d/components/data/monitor_data.py @@ -1724,7 +1724,7 @@ def poynting(self) -> TriangularSurfaceDataset: h_field = self.H poynting = e_field.updated_copy( - values=np.real(xr.cross(e_field.values, np.conj(h_field.values), dim="axis")) + values=np.real(xr.cross(np.real(e_field.values), np.real(h_field.values), dim="axis")) ) return poynting diff --git a/tidy3d/components/data/unstructured/base.py b/tidy3d/components/data/unstructured/base.py index 2eb874b200..29ec3fb937 100644 --- a/tidy3d/components/data/unstructured/base.py +++ b/tidy3d/components/data/unstructured/base.py @@ -21,7 +21,7 @@ SpatialDataArray, ) from tidy3d.components.types import ArrayLike, Axis, Bound -from tidy3d.constants import inf +from tidy3d.constants import fp_eps, inf from tidy3d.exceptions import DataError, Tidy3dNotImplementedError, ValidationError from tidy3d.log import log from tidy3d.packaging import requires_vtk, vtk @@ -989,8 +989,8 @@ def reflect( points_off_plane_map[self._boundary_points_indices] = ~np.isclose( self.points.sel(axis=axis).data[self._boundary_points_indices], center, - atol=1e-4, - rtol=1e-4, + atol=fp_eps, + rtol=fp_eps, ) num_new_points = np.sum(points_off_plane_map) diff --git a/tidy3d/components/data/unstructured/triangular.py b/tidy3d/components/data/unstructured/triangular.py index b08b53c328..2b7c87341b 100644 --- a/tidy3d/components/data/unstructured/triangular.py +++ b/tidy3d/components/data/unstructured/triangular.py @@ -279,7 +279,7 @@ def reflect( ) -> UnstructuredGridDataset: """Reflect unstructured data across the plane define by parameters ``axis`` and ``center``. By default the original data is preserved, setting ``reflection_only`` to ``True`` will - produce only deflected data. + produce only reflected data. Parameters ---------- diff --git a/tidy3d/components/simulation.py b/tidy3d/components/simulation.py index 4223e21291..c1477cb739 100644 --- a/tidy3d/components/simulation.py +++ b/tidy3d/components/simulation.py @@ -3990,7 +3990,7 @@ def error_empty_surface_monitor(cls, val, values): bounds = cls._get_surface_monitor_bounds(center, size, mnt, medium, structures) if len(bounds) == 0: raise SetupError( - f"Surface monitor {mnt.name} does not cross any PEC of LossyMetalMedium structures." + f"Surface monitor {mnt.name} does not cross any PEC or LossyMetalMedium structures." 
) return val From e61c05abeb6806d177879a92d31307e695b1a586 Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Tue, 14 Oct 2025 15:37:00 -0700 Subject: [PATCH 3/7] schemas --- schemas/EMESimulation.json | 110 ++++++ schemas/HeatChargeSimulation.json | 110 ++++++ schemas/HeatSimulation.json | 110 ++++++ schemas/ModeSimulation.json | 110 ++++++ schemas/Simulation.json | 505 +++++++++++++++++++++++++- schemas/TerminalComponentModeler.json | 505 +++++++++++++++++++++++++- 6 files changed, 1448 insertions(+), 2 deletions(-) diff --git a/schemas/EMESimulation.json b/schemas/EMESimulation.json index df1713e027..133d640610 100644 --- a/schemas/EMESimulation.json +++ b/schemas/EMESimulation.json @@ -10875,6 +10875,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -11127,6 +11182,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { diff --git a/schemas/HeatChargeSimulation.json b/schemas/HeatChargeSimulation.json index 34c1e3883f..a07020e07d 100644 --- a/schemas/HeatChargeSimulation.json +++ b/schemas/HeatChargeSimulation.json @@ -8919,6 +8919,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -9171,6 +9226,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + 
"type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { diff --git a/schemas/HeatSimulation.json b/schemas/HeatSimulation.json index 8a6eacbd79..f239de42cb 100644 --- a/schemas/HeatSimulation.json +++ b/schemas/HeatSimulation.json @@ -8919,6 +8919,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -9171,6 +9226,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { diff --git a/schemas/ModeSimulation.json b/schemas/ModeSimulation.json index ff8fac73e2..be78192d39 100644 --- a/schemas/ModeSimulation.json +++ b/schemas/ModeSimulation.json @@ -10541,6 +10541,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -10824,6 +10879,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } 
+ }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { diff --git a/schemas/Simulation.json b/schemas/Simulation.json index 0e4e86b99c..c6ef522d8f 100644 --- a/schemas/Simulation.json +++ b/schemas/Simulation.json @@ -14284,6 +14284,391 @@ }, "type": "object" }, + "SurfaceFieldMonitor": { + "additionalProperties": false, + "properties": { + "apodization": { + "allOf": [ + { + "$ref": "#/definitions/ApodizationSpec" + } + ], + "default": { + "attrs": {}, + "end": null, + "start": null, + "type": "ApodizationSpec", + "width": null + } + }, + "attrs": { + "default": {}, + "type": "object" + }, + "center": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ], + "default": [ + 0.0, + 0.0, + 0.0 + ] + }, + "colocate": { + "default": false, + "enum": [ + false + ], + "type": "boolean" + }, + "fields": { + "default": [ + "E", + "H" + ], + "items": { + "enum": [ + "E", + "H" + ], + "type": "string" + }, + "type": "array" + }, + "freqs": { + "anyOf": [ + { + "items": { + "type": "number" + }, + "type": "array" + }, + { + "type": "ArrayLike" + } + ] + }, + "interval_space": { + "default": [ + 1, + 1, + 1 + ], + "items": [ + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "size": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + "type": { + "default": "SurfaceFieldMonitor", + "enum": [ + "SurfaceFieldMonitor" + ], + "type": "string" + } + }, + "required": [ + "freqs", + "name", + "size" + ], + "type": "object" + }, + "SurfaceFieldTimeMonitor": { + "additionalProperties": false, + "properties": { + "attrs": { + "default": {}, + "type": "object" + }, + "center": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ], + "default": [ + 0.0, + 0.0, + 0.0 + ] + }, + "colocate": { + "default": false, + "enum": [ + false + ], + "type": "boolean" + }, + "fields": { + "default": [ + "E", + "H" + ], + "items": { + "enum": [ + "E", + "H" + ], + "type": "string" + }, + "type": "array" + }, + "interval": { + "exclusiveMinimum": 0, + "type": "integer" + }, + "interval_space": { + "default": [ + 1, + 1, + 1 + ], + "items": [ + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + 
"enum": [ + 1 + ], + "type": "integer" + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "size": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + "start": { + "default": 0.0, + "minimum": 0, + "type": "number" + }, + "stop": { + "minimum": 0, + "type": "number" + }, + "type": { + "default": "SurfaceFieldTimeMonitor", + "enum": [ + "SurfaceFieldTimeMonitor" + ], + "type": "string" + } + }, + "required": [ + "name", + "size" + ], + "type": "object" + }, "SurfaceImpedance": { "additionalProperties": false, "properties": { @@ -14561,6 +14946,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -14844,6 +15284,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -15684,7 +16179,9 @@ "MediumMonitor": "#/definitions/MediumMonitor", "ModeMonitor": "#/definitions/ModeMonitor", "ModeSolverMonitor": "#/definitions/ModeSolverMonitor", - "PermittivityMonitor": "#/definitions/PermittivityMonitor" + "PermittivityMonitor": "#/definitions/PermittivityMonitor", + "SurfaceFieldMonitor": "#/definitions/SurfaceFieldMonitor", + "SurfaceFieldTimeMonitor": "#/definitions/SurfaceFieldTimeMonitor" }, "propertyName": "type" }, @@ -15730,6 +16227,12 @@ }, { "$ref": "#/definitions/PermittivityMonitor" + }, + { + "$ref": "#/definitions/SurfaceFieldMonitor" + }, + { + "$ref": "#/definitions/SurfaceFieldTimeMonitor" } ] }, diff --git a/schemas/TerminalComponentModeler.json b/schemas/TerminalComponentModeler.json index 8fe6ce3d51..ee13afa169 100644 --- a/schemas/TerminalComponentModeler.json +++ b/schemas/TerminalComponentModeler.json @@ -14385,7 +14385,9 @@ "MediumMonitor": "#/definitions/MediumMonitor", "ModeMonitor": "#/definitions/ModeMonitor", "ModeSolverMonitor": "#/definitions/ModeSolverMonitor", - 
"PermittivityMonitor": "#/definitions/PermittivityMonitor" + "PermittivityMonitor": "#/definitions/PermittivityMonitor", + "SurfaceFieldMonitor": "#/definitions/SurfaceFieldMonitor", + "SurfaceFieldTimeMonitor": "#/definitions/SurfaceFieldTimeMonitor" }, "propertyName": "type" }, @@ -14431,6 +14433,12 @@ }, { "$ref": "#/definitions/PermittivityMonitor" + }, + { + "$ref": "#/definitions/SurfaceFieldMonitor" + }, + { + "$ref": "#/definitions/SurfaceFieldTimeMonitor" } ] }, @@ -15494,6 +15502,391 @@ }, "type": "object" }, + "SurfaceFieldMonitor": { + "additionalProperties": false, + "properties": { + "apodization": { + "allOf": [ + { + "$ref": "#/definitions/ApodizationSpec" + } + ], + "default": { + "attrs": {}, + "end": null, + "start": null, + "type": "ApodizationSpec", + "width": null + } + }, + "attrs": { + "default": {}, + "type": "object" + }, + "center": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ], + "default": [ + 0.0, + 0.0, + 0.0 + ] + }, + "colocate": { + "default": false, + "enum": [ + false + ], + "type": "boolean" + }, + "fields": { + "default": [ + "E", + "H" + ], + "items": { + "enum": [ + "E", + "H" + ], + "type": "string" + }, + "type": "array" + }, + "freqs": { + "anyOf": [ + { + "items": { + "type": "number" + }, + "type": "array" + }, + { + "type": "ArrayLike" + } + ] + }, + "interval_space": { + "default": [ + 1, + 1, + 1 + ], + "items": [ + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "size": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + "type": { + "default": "SurfaceFieldMonitor", + "enum": [ + "SurfaceFieldMonitor" + ], + "type": "string" + } + }, + "required": [ + "freqs", + "name", + "size" + ], + "type": "object" + }, + "SurfaceFieldTimeMonitor": { + "additionalProperties": false, + "properties": { + "attrs": { + "default": {}, + "type": "object" + }, + "center": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + }, + { + "anyOf": [ + { + "type": "autograd.tracer.Box" + }, + { + "type": "number" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ], + "default": [ + 0.0, + 0.0, + 0.0 + ] + }, + "colocate": { + "default": false, + "enum": [ + false + ], + "type": "boolean" + }, + "fields": { + "default": [ + "E", + "H" + ], + "items": { + "enum": [ + "E", + "H" + ], + "type": "string" + }, + "type": "array" + }, + "interval": { + 
"exclusiveMinimum": 0, + "type": "integer" + }, + "interval_space": { + "default": [ + 1, + 1, + 1 + ], + "items": [ + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + }, + { + "enum": [ + 1 + ], + "type": "integer" + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + "name": { + "minLength": 1, + "type": "string" + }, + "size": { + "anyOf": [ + { + "items": [ + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + { + "anyOf": [ + { + "minimum": 0, + "type": "number" + }, + { + "type": "autograd.tracer.Box" + } + ] + } + ], + "maxItems": 3, + "minItems": 3, + "type": "array" + }, + { + "type": "autograd.tracer.Box" + } + ] + }, + "start": { + "default": 0.0, + "minimum": 0, + "type": "number" + }, + "stop": { + "minimum": 0, + "type": "number" + }, + "type": { + "default": "SurfaceFieldTimeMonitor", + "enum": [ + "SurfaceFieldTimeMonitor" + ], + "type": "string" + } + }, + "required": [ + "name", + "size" + ], + "type": "object" + }, "SurfaceImpedance": { "additionalProperties": false, "properties": { @@ -15771,6 +16164,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { @@ -16054,6 +16502,61 @@ }, "values": { "anyOf": [ + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, + { + "properties": { + "_dims": { + "type": "Tuple[str, ...]" + } + }, + "required": [ + "_dims" + ], + "type": "xr.DataArray" + }, { "properties": { "_dims": { From d598caae27a918cdece0d9984050285d5bcab265 Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Tue, 14 Oct 2025 17:14:18 -0700 Subject: [PATCH 4/7] accidental fail --- tests/test_components/test_monitor.py | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/tests/test_components/test_monitor.py b/tests/test_components/test_monitor.py index f34251ab79..7853751c32 100644 --- a/tests/test_components/test_monitor.py +++ b/tests/test_components/test_monitor.py @@ -450,7 +450,13 @@ def test_monitor_surfaces_from_volume(): def test_surface_monitors(): pec_sphere = td.Structure(geometry=td.Sphere(radius=0.5), medium=td.PECMedium()) - surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") + # tests warning message at first instance of surface 
monitors + with AssertLogLevel("WARNING"): + surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") + + # no warning message at second instance of surface monitors + with AssertLogLevel(None): + surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") _ = td.Simulation( size=(2, 2, 2), @@ -473,15 +479,3 @@ def test_surface_monitors(): run_time=1e-12, grid_spec=td.GridSpec.auto(wavelength=1), ) - - # tests warning message - with AssertLogLevel("WARNING"): - surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") - - _ = td.Simulation( - size=(2, 2, 2), - structures=[pec_sphere], - monitors=[surf_mnt], - run_time=1e-12, - grid_spec=td.GridSpec.auto(wavelength=1), - ) From 576a2160dfff31d9905d27db6a63ce5e9eb6005f Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Wed, 15 Oct 2025 00:09:06 -0700 Subject: [PATCH 5/7] more of greptile comments and coverage --- tests/_test_data/_test_datasets_no_vtk.py | 14 +- .../_test_unstructured_pyvista_no_pyvista.py | 2 +- tests/test_components/test_monitor.py | 22 +- tests/test_data/test_data_arrays.py | 56 +++ tests/test_data/test_datasets.py | 397 +++++++++++++++--- tests/test_data/test_monitor_data.py | 167 ++++++++ tests/test_data/test_unstructured_pyvista.py | 27 ++ tidy3d/__init__.py | 5 +- .../components/base_sim/data/monitor_data.py | 32 +- tidy3d/components/data/data_array.py | 2 + tidy3d/components/data/dataset.py | 23 +- tidy3d/components/data/monitor_data.py | 40 +- tidy3d/components/data/unstructured/base.py | 130 ++++-- .../components/data/unstructured/surface.py | 24 +- .../data/unstructured/tetrahedral.py | 1 - tidy3d/components/monitor.py | 9 +- tidy3d/components/simulation.py | 2 +- .../tcad/data/monitor_data/abstract.py | 2 +- tidy3d/components/viz/__init__.py | 2 - tidy3d/components/viz/axes_utils.py | 24 -- 20 files changed, 803 insertions(+), 178 deletions(-) diff --git a/tests/_test_data/_test_datasets_no_vtk.py b/tests/_test_data/_test_datasets_no_vtk.py index 0e6866ac65..ad4a5ac3a4 100644 --- a/tests/_test_data/_test_datasets_no_vtk.py +++ b/tests/_test_data/_test_datasets_no_vtk.py @@ -6,8 +6,13 @@ import pytest +from tidy3d.components.data.data_array import IndexedDataArray + from ..test_data.test_datasets import test_tetrahedral_dataset as _test_tetrahedral_dataset from ..test_data.test_datasets import test_triangular_dataset as _test_triangular_dataset +from ..test_data.test_datasets import ( + test_triangular_surface_dataset as _test_triangular_surface_dataset, +) @pytest.fixture @@ -24,9 +29,14 @@ def mocked_import(name, *args, **kwargs): @pytest.mark.usefixtures("hide_vtk") def test_triangular_dataset_no_vtk(tmp_path): - _test_triangular_dataset(tmp_path, "test_name", 0, no_vtk=True) + _test_triangular_dataset(tmp_path, "test_name", IndexedDataArray, no_vtk=True) @pytest.mark.usefixtures("hide_vtk") def test_tetrahedral_dataset_no_vtk(tmp_path): - _test_tetrahedral_dataset(tmp_path, "test_name", 0, no_vtk=True) + _test_tetrahedral_dataset(tmp_path, "test_name", IndexedDataArray, no_vtk=True) + + +@pytest.mark.usefixtures("hide_vtk") +def test_triangular_surface_dataset_no_vtk(tmp_path): + _test_triangular_surface_dataset(tmp_path, "test_name", IndexedDataArray, no_vtk=True) diff --git a/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py b/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py index b1c2fde1c0..675c5070ba 100644 --- a/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py +++ 
b/tests/_test_data/_test_unstructured_pyvista_no_pyvista.py @@ -45,7 +45,7 @@ @pytest.fixture -def hide_pyvista(monkeypatch, request): +def hide_pyvista(monkeypatch): """Hide pyvista module to test error handling when not installed.""" import_orig = builtins.__import__ diff --git a/tests/test_components/test_monitor.py b/tests/test_components/test_monitor.py index 7853751c32..255558353b 100644 --- a/tests/test_components/test_monitor.py +++ b/tests/test_components/test_monitor.py @@ -450,13 +450,7 @@ def test_monitor_surfaces_from_volume(): def test_surface_monitors(): pec_sphere = td.Structure(geometry=td.Sphere(radius=0.5), medium=td.PECMedium()) - # tests warning message at first instance of surface monitors - with AssertLogLevel("WARNING"): - surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") - - # no warning message at second instance of surface monitors - with AssertLogLevel(None): - surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") + surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") _ = td.Simulation( size=(2, 2, 2), @@ -466,6 +460,16 @@ def test_surface_monitors(): grid_spec=td.GridSpec.auto(wavelength=1), ) + # background is PEC with dielectric sphere + _ = td.Simulation( + size=(2, 2, 2), + medium=td.PECMedium(), + structures=[pec_sphere.updated_copy(medium=td.Medium())], + monitors=[surf_mnt], + run_time=1e-12, + grid_spec=td.GridSpec.auto(wavelength=1), + ) + # monitor doesn't overlap any pec structure with pytest.raises(pydantic.ValidationError): surf_mnt = td.SurfaceFieldMonitor( @@ -479,3 +483,7 @@ def test_surface_monitors(): run_time=1e-12, grid_spec=td.GridSpec.auto(wavelength=1), ) + + # monitor must be volumetric + with pytest.raises(pydantic.ValidationError): + surf_mnt = td.SurfaceFieldMonitor(size=(1, 0, 1), freqs=[td.C_0], name="surface") diff --git a/tests/test_data/test_data_arrays.py b/tests/test_data/test_data_arrays.py index 185ee4c2dc..325f2abd73 100644 --- a/tests/test_data/test_data_arrays.py +++ b/tests/test_data/test_data_arrays.py @@ -37,6 +37,7 @@ ] FIELDS = ("Ex", "Ey", "Ez", "Hx", "Hz") AUX_FIELDS = ("Nfz",) +SURFACE_FIELDS = ("E", "H") INTERVAL = 2 ORDERS_X = list(range(-1, 2)) ORDERS_Y = list(range(-2, 3)) @@ -87,6 +88,12 @@ theta=list(THETAS), proj_distance=PD, ) +SURFACE_FIELD_MONITOR = td.SurfaceFieldMonitor( + size=SIZE_3D, fields=SURFACE_FIELDS, name="surface_field", freqs=FREQS +) +SURFACE_FIELD_TIME_MONITOR = td.SurfaceFieldTimeMonitor( + size=SIZE_3D, fields=SURFACE_FIELDS, name="surface_field_time", interval=INTERVAL +) MONITORS = [ FIELD_MONITOR, @@ -99,6 +106,8 @@ FLUX_TIME_MONITOR, DIFFRACTION_MONITOR, DIRECTIVITY_MONITOR, + # SURFACE_FIELD_MONITOR, + # SURFACE_FIELD_TIME_MONITOR, ] GRID_SPEC = td.GridSpec(wavelength=2.0) @@ -233,6 +242,53 @@ def make_diffraction_data_array(): ) +def make_surface_triangular_dataset(): + """Create a mock triangular surface dataset for testing.""" + # Create simple triangular mesh points + points = td.PointDataArray([[0, 0, 0], [1, 0, 0], [0.5, 1, 0]], dims=["index", "axis"]) + # Define one triangle using the three points + cells = td.CellDataArray([[0, 1, 2]], dims=["cell_index", "vertex_index"]) + return points, cells + + +def make_surface_field_data_array(): + """Create surface field data array for testing.""" + points, cells = make_surface_triangular_dataset() + + # Create field values on both sides of surface for frequency domain + values = (1 + 1j) * np.random.random((3, 2, 3, len(FREQS))) # index, side, 
axis, f + field_values = td.IndexedFieldDataArray( + values, + coords={"index": [0, 1, 2], "side": ["outside", "inside"], "axis": [0, 1, 2], "f": FREQS}, + ) + + return td.TriangularSurfaceDataset(points=points, cells=cells, values=field_values) + + +def make_surface_field_time_data_array(): + """Create surface field time data array for testing.""" + points, cells = make_surface_triangular_dataset() + + # Create field values on both sides of surface for time domain + values = (1 + 1j) * np.random.random((3, 2, 3, len(TS))) # index, side, axis, t + field_values = td.IndexedFieldTimeDataArray( + values, + coords={"index": [0, 1, 2], "side": ["outside", "inside"], "axis": [0, 1, 2], "t": TS}, + ) + + return td.TriangularSurfaceDataset(points=points, cells=cells, values=field_values) + + +def make_surface_normal_data_array(): + """Create surface normal vector data array for testing.""" + points, cells = make_surface_triangular_dataset() + + # Simple normal vectors (pointing in z direction) + normal_values = td.PointDataArray([[0, 0, 1], [0, 0, 1], [0, 0, 1]], dims=["index", "axis"]) + + return td.TriangularSurfaceDataset(points=points, cells=cells, values=normal_values) + + """ Test that they work """ diff --git a/tests/test_data/test_datasets.py b/tests/test_data/test_datasets.py index b3602ff507..9f39af270e 100644 --- a/tests/test_data/test_datasets.py +++ b/tests/test_data/test_datasets.py @@ -1,37 +1,57 @@ -"""Tests tidy3d/components/tests//dataset.py""" +"""Tests tidy3d/components/data/unstructured""" from __future__ import annotations +from typing import get_args + import numpy as np import pydantic.v1 as pd import pytest from matplotlib import pyplot as plt +from tidy3d.components.data.data_array import IndexedDataArrayTypes + from ..utils import AssertLogLevel, cartesian_to_unstructured np.random.seed(4) - -@pytest.mark.parametrize("dataset_type_ind", [0, 1, 2]) +# Extract actual types from the Union for parametrization +INDEXED_DATA_ARRAY_TYPES = get_args(IndexedDataArrayTypes) + +# Map each indexed data array type to its extra dimensions +TYPE_TO_EXTRA_DIMS = {} +import tidy3d as td + +TYPE_TO_EXTRA_DIMS[td.IndexedDataArray] = {} +TYPE_TO_EXTRA_DIMS[td.IndexedVoltageDataArray] = {"voltage": [0, 1, 2]} +TYPE_TO_EXTRA_DIMS[td.IndexedTimeDataArray] = {"t": [0, 1, 2]} +TYPE_TO_EXTRA_DIMS[td.IndexedFreqDataArray] = {"f": [1e14, 2e14, 3e14]} +TYPE_TO_EXTRA_DIMS[td.IndexedFieldVoltageDataArray] = {"axis": [0, 1, 2], "voltage": [0, 1, 2]} +TYPE_TO_EXTRA_DIMS[td.IndexedSurfaceFreqDataArray] = { + "side": ["outside", "inside"], + "f": [1e14, 2e14, 3e14], +} +TYPE_TO_EXTRA_DIMS[td.IndexedSurfaceTimeDataArray] = {"side": ["outside", "inside"], "t": [0, 1, 2]} +TYPE_TO_EXTRA_DIMS[td.IndexedFieldDataArray] = { + "side": ["outside", "inside"], + "axis": [0, 1, 2], + "f": [1e14, 2e14, 3e14], +} +TYPE_TO_EXTRA_DIMS[td.IndexedFieldTimeDataArray] = { + "side": ["outside", "inside"], + "axis": [0, 1, 2], + "t": [0, 1, 2], +} +TYPE_TO_EXTRA_DIMS[td.PointDataArray] = {"axis": [0, 1, 2]} + + +@pytest.mark.parametrize("values_type", TYPE_TO_EXTRA_DIMS.keys()) @pytest.mark.parametrize("ds_name", ["test123", None]) -def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): +def test_triangular_dataset(tmp_path, ds_name, values_type, no_vtk=False): import tidy3d as td from tidy3d.exceptions import DataError, Tidy3dImportError - if dataset_type_ind == 0: - dataset_type = td.TriangularGridDataset - values_type = td.IndexedDataArray - extra_dims = {} - - if dataset_type_ind == 1: - 
dataset_type = td.TriangularGridDataset - values_type = td.IndexedVoltageDataArray - extra_dims = {"voltage": [0, 1, 2]} - - if dataset_type_ind == 2: - dataset_type = td.TriangularGridDataset - values_type = td.IndexedTimeDataArray - extra_dims = {"t": [0, 1, 2]} + extra_dims = TYPE_TO_EXTRA_DIMS[values_type] # basic create tri_grid_points = td.PointDataArray( @@ -50,7 +70,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): name=ds_name, ) - tri_grid = dataset_type( + tri_grid = td.TriangularGridDataset( normal_axis=1, normal_pos=0, points=tri_grid_points, @@ -68,7 +88,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords={"index": np.arange(4), "axis": np.arange(3)}, ) - _ = dataset_type( + _ = td.TriangularGridDataset( normal_axis=0, normal_pos=10, points=tri_grid_points_bad, @@ -83,7 +103,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): ) with AssertLogLevel("WARNING"): - tri_grid_with_degenerates = dataset_type( + tri_grid_with_degenerates = td.TriangularGridDataset( normal_axis=2, normal_pos=-3, points=tri_grid_points, @@ -118,7 +138,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords={"cell_index": np.arange(1), "vertex_index": np.arange(4)}, ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TriangularGridDataset( normal_axis=2, normal_pos=-3, points=tri_grid_points, @@ -131,7 +151,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords={"cell_index": np.arange(2), "vertex_index": np.arange(3)}, ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TriangularGridDataset( normal_axis=2, normal_pos=-3, points=tri_grid_points, @@ -145,7 +165,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords=dict(index=np.arange(3), **extra_dims), ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TriangularGridDataset( normal_axis=0, normal_pos=0, points=tri_grid_points, @@ -243,7 +263,9 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): with pytest.raises(DataError): _ = tri_grid.plot() - tri_grid_one_field = tri_grid.isel(**{key: value[0] for key, value in extra_dims.items()}) + tri_grid_one_field = tri_grid.sel(**{key: value[0] for key, value in extra_dims.items()}) + else: + tri_grid_one_field = tri_grid # plotting _ = tri_grid_one_field.plot() @@ -290,7 +312,7 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): # writing/reading tri_grid.to_file(tmp_path / "tri_grid_test.hdf5") - tri_grid_loaded = dataset_type.from_file(tmp_path / "tri_grid_test.hdf5") + tri_grid_loaded = td.TriangularGridDataset.from_file(tmp_path / "tri_grid_test.hdf5") assert tri_grid == tri_grid_loaded # writing/reading .vtu @@ -298,25 +320,27 @@ def test_triangular_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): with pytest.raises(Tidy3dImportError): tri_grid.to_vtu(tmp_path / "tri_grid_test.vtu") with pytest.raises(Tidy3dImportError): - tri_grid_loaded = dataset_type.from_vtu(tmp_path / "tri_grid_test.vtu") + tri_grid_loaded = td.TriangularGridDataset.from_vtu(tmp_path / "tri_grid_test.vtu") else: tri_grid.to_vtu(tmp_path / "tri_grid_test.vtu") if len(extra_dims) == 0: - tri_grid_loaded = dataset_type.from_vtu(tmp_path / "tri_grid_test.vtu") + tri_grid_loaded = td.TriangularGridDataset.from_vtu(tmp_path / "tri_grid_test.vtu") assert tri_grid == tri_grid_loaded custom_name = 
"newname" tri_grid_renamed = tri_grid.rename(custom_name) tri_grid_renamed.to_vtu(tmp_path / "tri_grid_test.vtu") - tri_grid_loaded = dataset_type.from_vtu( + tri_grid_loaded = td.TriangularGridDataset.from_vtu( tmp_path / "tri_grid_test.vtu", field=custom_name ) assert tri_grid == tri_grid_loaded with pytest.raises(AttributeError): - tri_grid_loaded = dataset_type.from_vtu(tmp_path / "tri_grid_test.vtu", field="blah") + tri_grid_loaded = td.TriangularGridDataset.from_vtu( + tmp_path / "tri_grid_test.vtu", field="blah" + ) # test ariphmetic operations def operation(arr): @@ -328,27 +352,24 @@ def operation(arr): assert np.allclose(result.values, result_values) assert result.name == ds_name + # test conjugate + assert np.allclose(tri_grid.conj().values, np.conjugate(tri_grid.values)) + + # test norm + if "axis" in extra_dims: + axis_index = 1 + list(extra_dims.keys()).index("axis") + assert np.allclose( + tri_grid.norm(dim="axis").values, np.linalg.norm(tri_grid.values, axis=axis_index) + ) -@pytest.mark.parametrize("dataset_type_ind", [0, 1, 2]) + +@pytest.mark.parametrize("values_type", TYPE_TO_EXTRA_DIMS.keys()) @pytest.mark.parametrize("ds_name", ["test123", None]) -def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): +def test_tetrahedral_dataset(tmp_path, ds_name, values_type, no_vtk=False): import tidy3d as td from tidy3d.exceptions import DataError, Tidy3dImportError - if dataset_type_ind == 0: - dataset_type = td.TetrahedralGridDataset - values_type = td.IndexedDataArray - extra_dims = {} - - if dataset_type_ind == 1: - dataset_type = td.TetrahedralGridDataset - values_type = td.IndexedVoltageDataArray - extra_dims = {"voltage": [0, 1, 2]} - - if dataset_type_ind == 2: - dataset_type = td.TetrahedralGridDataset - values_type = td.IndexedTimeDataArray - extra_dims = {"t": [0, 1, 2]} + extra_dims = TYPE_TO_EXTRA_DIMS[values_type] # basic create tet_grid_points = td.PointDataArray( @@ -376,7 +397,7 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): name=ds_name, ) - tet_grid = dataset_type( + tet_grid = td.TetrahedralGridDataset( points=tet_grid_points, cells=tet_grid_cells, values=tet_grid_values, @@ -388,7 +409,7 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords={"index": np.arange(8), "axis": np.arange(2)}, ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TetrahedralGridDataset( points=tet_grid_points_bad, cells=tet_grid_cells, values=tet_grid_values, @@ -401,7 +422,7 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): ) with AssertLogLevel("WARNING"): - tet_grid_with_degenerates = dataset_type( + tet_grid_with_degenerates = td.TetrahedralGridDataset( points=tet_grid_points, cells=tet_grid_cells_bad, values=tet_grid_values, @@ -434,7 +455,7 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords={"cell_index": np.arange(6), "vertex_index": np.arange(3)}, ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TetrahedralGridDataset( points=tet_grid_points, cells=tet_grid_cells_bad, values=tet_grid_values, @@ -445,7 +466,7 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords={"cell_index": np.arange(6), "vertex_index": np.arange(4)}, ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TetrahedralGridDataset( points=tet_grid_points, cells=tet_grid_cells_bad, values=tet_grid_values, @@ -457,7 +478,7 @@ def 
test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): coords=dict(index=np.arange(5), **extra_dims), ) with pytest.raises(pd.ValidationError): - _ = dataset_type( + _ = td.TetrahedralGridDataset( points=tet_grid_points, cells=tet_grid_cells_bad, values=tet_grid_values_bad, @@ -564,7 +585,7 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): # writing/reading tet_grid.to_file(tmp_path / "tri_grid_test.hdf5") - tet_grid_loaded = dataset_type.from_file(tmp_path / "tri_grid_test.hdf5") + tet_grid_loaded = td.TetrahedralGridDataset.from_file(tmp_path / "tri_grid_test.hdf5") assert tet_grid == tet_grid_loaded # writing/reading .vtu @@ -572,25 +593,25 @@ def test_tetrahedral_dataset(tmp_path, ds_name, dataset_type_ind, no_vtk=False): with pytest.raises(Tidy3dImportError): tet_grid.to_vtu(tmp_path / "tet_grid_test.vtu") with pytest.raises(Tidy3dImportError): - tet_grid_loaded = dataset_type.from_vtu(tmp_path / "tet_grid_test.vtu") + tet_grid_loaded = td.TetrahedralGridDataset.from_vtu(tmp_path / "tet_grid_test.vtu") else: tet_grid.to_vtu(tmp_path / "tet_grid_test.vtu") if len(extra_dims) == 0: - tet_grid_loaded = dataset_type.from_vtu(tmp_path / "tet_grid_test.vtu") + tet_grid_loaded = td.TetrahedralGridDataset.from_vtu(tmp_path / "tet_grid_test.vtu") assert tet_grid == tet_grid_loaded custom_name = "newname" tet_grid_renamed = tet_grid.rename(custom_name) tet_grid_renamed.to_vtu(tmp_path / "tet_grid_test.vtu") - tet_grid_loaded = dataset_type.from_vtu( + tet_grid_loaded = td.TetrahedralGridDataset.from_vtu( tmp_path / "tet_grid_test.vtu", field=custom_name ) assert tet_grid == tet_grid_loaded with pytest.raises(AttributeError): - dataset_type.from_vtu(tmp_path / "tet_grid_test.vtu", field="blah") + td.TetrahedralGridDataset.from_vtu(tmp_path / "tet_grid_test.vtu", field="blah") # test ariphmetic operations def operation(arr): @@ -602,6 +623,268 @@ def operation(arr): assert np.allclose(result.values, result_values) assert result.name == ds_name + # test conjugate + assert np.allclose(tet_grid.conj().values, np.conjugate(tet_grid.values)) + + # test norm + if "axis" in extra_dims: + axis_index = 1 + list(extra_dims.keys()).index("axis") + assert np.allclose( + tet_grid.norm(dim="axis").values, np.linalg.norm(tet_grid.values, axis=axis_index) + ) + + +@pytest.mark.parametrize("values_type", TYPE_TO_EXTRA_DIMS.keys()) +@pytest.mark.parametrize("ds_name", ["test123", None]) +def test_triangular_surface_dataset(tmp_path, ds_name, values_type, no_vtk=False): + import tidy3d as td + from tidy3d.exceptions import DataError, Tidy3dImportError + + extra_dims = TYPE_TO_EXTRA_DIMS[values_type] + + # basic create + surf_grid_points = td.PointDataArray( + [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0]], + dims=("index", "axis"), + ) + + surf_grid_cells = td.CellDataArray( + [[0, 1, 2], [1, 2, 3]], + dims=("cell_index", "vertex_index"), + ) + + surf_grid_values = values_type( + np.random.rand(4, *[len(coord) for coord in extra_dims.values()]), + coords=dict(index=np.arange(4), **extra_dims), + name=ds_name, + ) + + surf_grid = td.TriangularSurfaceDataset( + points=surf_grid_points, + cells=surf_grid_cells, + values=surf_grid_values, + ) + assert not surf_grid.is_uniform + # test name redirect + assert surf_grid.name == ds_name + + # wrong points dimensionality (should be 3D, not 2D) + with pytest.raises(pd.ValidationError): + surf_grid_points_bad = td.PointDataArray( + np.random.random((4, 2)), + coords={"index": np.arange(4), "axis": 
np.arange(2)}, + ) + + _ = td.TriangularSurfaceDataset( + points=surf_grid_points_bad, + cells=surf_grid_cells, + values=surf_grid_values, + ) + + # grid with degenerate cells + surf_grid_cells_bad = td.CellDataArray( + [[0, 1, 1], [1, 2, 3]], + coords={"cell_index": np.arange(2), "vertex_index": np.arange(3)}, + ) + + with AssertLogLevel("WARNING"): + surf_grid_with_degenerates = td.TriangularSurfaceDataset( + points=surf_grid_points, + cells=surf_grid_cells_bad, + values=surf_grid_values, + ) + + # removal of degenerate cells + + # only removing degenerate cells will result in unused points in this case + with AssertLogLevel("WARNING"): + surf_grid_with_fixed = surf_grid_with_degenerates.clean( + remove_degenerate_cells=True, remove_unused_points=False + ) + assert np.all(surf_grid_with_fixed.cells.values == [[1, 2, 3]]) + + # once we remove those, no warning should occur + with AssertLogLevel(None): + surf_grid_with_fixed = surf_grid_with_fixed.clean( + remove_degenerate_cells=False, remove_unused_points=True + ) + assert np.all(surf_grid_with_fixed.cells.values == [[0, 1, 2]]) + + # doing both at the same time + with AssertLogLevel(None): + surf_grid_with_fixed = surf_grid_with_degenerates.clean() + assert np.all(surf_grid_with_fixed.cells.values == [[0, 1, 2]]) + + # invalid cell connections + surf_grid_cells_bad = td.CellDataArray( + [[0, 1, 2, 3]], + coords={"cell_index": np.arange(1), "vertex_index": np.arange(4)}, + ) + with pytest.raises(pd.ValidationError): + _ = td.TriangularSurfaceDataset( + points=surf_grid_points, + cells=surf_grid_cells_bad, + values=surf_grid_values, + ) + + surf_grid_cells_bad = td.CellDataArray( + [[0, 1, 5], [1, 2, 3]], + coords={"cell_index": np.arange(2), "vertex_index": np.arange(3)}, + ) + with pytest.raises(pd.ValidationError): + _ = td.TriangularSurfaceDataset( + points=surf_grid_points, + cells=surf_grid_cells_bad, + values=surf_grid_values, + ) + + # wrong number of values + surf_grid_values_bad = values_type( + np.random.rand(3, *[len(coord) for coord in extra_dims.values()]), + coords=dict(index=np.arange(3), **extra_dims), + ) + with pytest.raises(pd.ValidationError): + _ = td.TriangularSurfaceDataset( + points=surf_grid_points, + cells=surf_grid_cells, + values=surf_grid_values_bad, + ) + + # some auxiliary properties + assert surf_grid.bounds == ((0.0, 0.0, 0.0), (1.0, 1.0, 0.0)) + assert np.all(surf_grid._vtk_offsets == np.array([0, 3, 6])) + + if no_vtk: + with pytest.raises(Tidy3dImportError): + _ = surf_grid._vtk_cells + with pytest.raises(Tidy3dImportError): + _ = surf_grid._vtk_points + with pytest.raises(Tidy3dImportError): + _ = surf_grid._vtk_obj + else: + _ = surf_grid._vtk_cells + _ = surf_grid._vtk_points + _ = surf_grid._vtk_obj + + # plane slicing - not supported for surface datasets + if no_vtk: + with pytest.raises(Tidy3dImportError): + _ = surf_grid.plane_slice(axis=2, pos=0.5) + else: + with pytest.raises(td.exceptions.Tidy3dNotImplementedError): + _ = surf_grid.plane_slice(axis=2, pos=0.5) + + # clipping by a box + if no_vtk: + with pytest.raises(Tidy3dImportError): + _ = surf_grid.box_clip([[0.1, 0.1, -0.1], [0.9, 0.9, 0.1]]) + else: + result = surf_grid.box_clip([[0.1, 0.1, -0.1], [0.9, 0.9, 0.1]]) + assert result.name == ds_name + + # can't clip outside of grid + with pytest.raises(DataError): + _ = surf_grid.box_clip([[0.1, 0.1, 0.5], [0.9, 0.9, 1.0]]) + + # renaming + surf_grid_renamed = surf_grid.rename("renamed") + assert surf_grid_renamed.name == "renamed" + + # generalized selection method - spatial selection not 
supported for surface + with pytest.raises(td.exceptions.Tidy3dNotImplementedError): + _ = surf_grid.sel(x=0.5) + + # non-spatial selection should work + if len(extra_dims) > 0: + first_key = list(extra_dims.keys())[0] + first_value = extra_dims[first_key][0] + result = surf_grid.sel(**{first_key: first_value}) + assert result.name == ds_name + result_isel = surf_grid.isel(**{first_key: 0}) + assert result == result_isel + + # cannot sel along index dimension + with pytest.raises(DataError): + _ = surf_grid.sel(index=0) + with pytest.raises(DataError): + _ = surf_grid.isel(index=0) + + # writing/reading + surf_grid.to_file(tmp_path / "surf_grid_test.hdf5") + + surf_grid_loaded = td.TriangularSurfaceDataset.from_file(tmp_path / "surf_grid_test.hdf5") + assert surf_grid == surf_grid_loaded + + # writing/reading .vtu + if no_vtk: + with pytest.raises(Tidy3dImportError): + surf_grid.to_vtu(tmp_path / "surf_grid_test.vtu") + with pytest.raises(Tidy3dImportError): + surf_grid_loaded = td.TriangularSurfaceDataset.from_vtu(tmp_path / "surf_grid_test.vtu") + else: + surf_grid.to_vtu(tmp_path / "surf_grid_test.vtu") + + if len(extra_dims) == 0: + surf_grid_loaded = td.TriangularSurfaceDataset.from_vtu(tmp_path / "surf_grid_test.vtu") + assert surf_grid == surf_grid_loaded + + custom_name = "newname" + surf_grid_renamed = surf_grid.rename(custom_name) + surf_grid_renamed.to_vtu(tmp_path / "surf_grid_test.vtu") + + surf_grid_loaded = td.TriangularSurfaceDataset.from_vtu( + tmp_path / "surf_grid_test.vtu", field=custom_name + ) + assert surf_grid == surf_grid_loaded + + with pytest.raises(AttributeError): + surf_grid_loaded = td.TriangularSurfaceDataset.from_vtu( + tmp_path / "surf_grid_test.vtu", field="blah" + ) + + # test cell volumes (areas for surface) + cell_volumes = surf_grid.get_cell_volumes() + assert len(cell_volumes) == len(surf_grid.cells) + assert np.all(cell_volumes > 0) + + # test arithmetic operations + def operation(arr): + return 5 + (arr * 2 + arr.imag / 3) ** 2 / arr.real + np.log10(arr.abs) + + result = operation(surf_grid) + result_values = operation(surf_grid.values) + + assert np.allclose(result.values, result_values) + assert result.name == ds_name + + # test conjugate + assert np.allclose(surf_grid.conj().values, np.conjugate(surf_grid.values)) + + # test norm + if "axis" in extra_dims: + axis_index = 1 + list(extra_dims.keys()).index("axis") + assert np.allclose( + surf_grid.norm(dim="axis").values, np.linalg.norm(surf_grid.values, axis=axis_index) + ) + + # test non-spatial interpolation works + if len(extra_dims) > 0: + first_key = list(extra_dims.keys())[0] + first_coords = extra_dims[first_key] + # interpolate to midpoint + if isinstance(first_coords[0], str): + # Can't interpolate string coordinates + pass + else: + mid_value = (first_coords[0] + first_coords[1]) / 2 + result = surf_grid.interp(**{first_key: [mid_value]}) + assert result.name == ds_name + assert len(result.values.coords[first_key]) == 1 + + # test spatial interpolation raises error (not implemented for surfaces) + with pytest.raises(td.exceptions.Tidy3dNotImplementedError): + _ = surf_grid.interp(x=0.5, y=0.5, z=0.5) + @pytest.mark.parametrize("fill_value", [0.23123, "extrapolate"]) @pytest.mark.parametrize("use_vtk", [True, False]) diff --git a/tests/test_data/test_monitor_data.py b/tests/test_data/test_monitor_data.py index b1abe24127..713221d5ea 100644 --- a/tests/test_data/test_monitor_data.py +++ b/tests/test_data/test_monitor_data.py @@ -24,6 +24,8 @@ MediumData, ModeData, PermittivityData, + 
SurfaceFieldData, + SurfaceFieldTimeData, ) from tidy3d.components.data.zbf import ZBFData from tidy3d.constants import UnitScaling @@ -46,6 +48,8 @@ PERMITTIVITY_MONITOR, SIM, SIM_SYM, + SURFACE_FIELD_MONITOR, + SURFACE_FIELD_TIME_MONITOR, make_diffraction_data_array, make_far_field_data_array, make_flux_data_array, @@ -56,6 +60,9 @@ make_scalar_field_time_data_array, make_scalar_mode_field_data_array, make_scalar_mode_field_data_array_smooth, + make_surface_field_data_array, + make_surface_field_time_data_array, + make_surface_normal_data_array, ) # data array instances @@ -296,6 +303,30 @@ def make_diffraction_data(): ) +def make_surface_field_data(): + """Create a SurfaceFieldData instance for testing.""" + normal = make_surface_normal_data_array() + + # Create field datasets for E and H components + field_datasets = {} + for field in SURFACE_FIELD_MONITOR.fields: + field_datasets[field] = make_surface_field_data_array() + + return SurfaceFieldData(monitor=SURFACE_FIELD_MONITOR, normal=normal, **field_datasets) + + +def make_surface_field_time_data(): + """Create a SurfaceFieldTimeData instance for testing.""" + normal = make_surface_normal_data_array() + + # Create field datasets for E and H components + field_datasets = {} + for field in SURFACE_FIELD_TIME_MONITOR.fields: + field_datasets[field] = make_surface_field_time_data_array() + + return SurfaceFieldTimeData(monitor=SURFACE_FIELD_TIME_MONITOR, normal=normal, **field_datasets) + + """ Test them out """ @@ -1126,3 +1157,139 @@ def test_from_zbf_dimsfail(self, tmp_path, field_data, dim1, dim2): # this should fail with pytest.raises(ValueError) as e: _ = td.FieldDataset.from_zbf(filename=zbf_filename, dim1=dim1, dim2=dim2) + + +def test_surface_field_data(): + """Test SurfaceFieldData functionality.""" + data = make_surface_field_data() + + # Test field component access + for field in SURFACE_FIELD_MONITOR.fields: + field_data = getattr(data, field) + assert field_data is not None + assert hasattr(field_data, "values") + + # Test normal vector access + assert data.normal is not None + assert hasattr(data.normal, "values") + + # Test Poynting vector calculation + poynting = data.poynting + assert poynting is not None + assert hasattr(poynting, "values") + + # Test normalization + def dummy_source_spectrum(freq): + return 1.0 + 0.1j * np.ones_like(freq) + + normalized_data = data.normalize(dummy_source_spectrum) + assert isinstance(normalized_data, SurfaceFieldData) + + # Verify that the field components are different after normalization + for field_name, original_field in data.field_components.items(): + normalized_field = getattr(normalized_data, field_name) + # Should not be exactly equal due to normalization + assert not np.allclose(original_field.values, normalized_field.values) + + # test intensity calculation + intensity = data.intensity + assert intensity is not None + assert hasattr(intensity, "values") + assert np.all(np.isreal(intensity.values.values)) + + +def test_surface_field_time_data(): + """Test SurfaceFieldTimeData functionality.""" + data = make_surface_field_time_data() + + # Test field component access + for field in SURFACE_FIELD_TIME_MONITOR.fields: + field_data = getattr(data, field) + assert field_data is not None + assert hasattr(field_data, "values") + + # Test normal vector access + assert data.normal is not None + assert hasattr(data.normal, "values") + + # Test Poynting vector calculation for time domain + poynting = data.poynting + assert poynting is not None + assert hasattr(poynting, "values") + # 
Time domain Poynting should be real (no complex conjugate) + assert np.all(np.isreal(poynting.values.values)) + + # test intensity calculation + intensity = data.intensity + assert intensity is not None + assert hasattr(intensity, "values") + assert np.all(np.isreal(intensity.values.values)) + + +def test_surface_field_data_missing_fields(): + """Test error handling when required fields are missing for Poynting calculation.""" + normal = make_surface_normal_data_array() + + # Create data with only E field (no H field) + surface_data_partial = SurfaceFieldData( + monitor=SURFACE_FIELD_MONITOR.updated_copy(fields=("E",)), + normal=normal, + E=make_surface_field_data_array(), + H=None, + ) + + # Should raise DataError when trying to calculate the Poynting vector + with pytest.raises(DataError, match="not included in this data object"): + _ = surface_data_partial.poynting + + # Similarly, current density calculation fails without the H field + with pytest.raises(ValueError, match="Could not calculate current density"): + _ = surface_data_partial.current_density + + +def test_surface_field_time_data_missing_fields(): + """Test error handling when required fields are missing for Poynting calculation.""" + normal = make_surface_normal_data_array() + + # Create data with only E field (no H field) + surface_data_partial = SurfaceFieldTimeData( + monitor=SURFACE_FIELD_TIME_MONITOR.updated_copy(fields=("H",)), + normal=normal, + H=make_surface_field_time_data_array(), + E=None, + ) + + # Should raise DataError when trying to calculate the Poynting vector + with pytest.raises(DataError, match="not included in this data object"): + _ = surface_data_partial.poynting + + # Can calculate current density because the E field is not required + current_density = surface_data_partial.current_density + assert current_density is not None + assert hasattr(current_density, "values") + + +def test_surface_field_data_symmetry_expanded_copy(): + """Test SurfaceFieldData symmetry functionality.""" + + # no symmetry + data = make_surface_field_data() + data_sym = data.symmetry_expanded_copy + assert data == data_sym + + data = data.updated_copy(symmetry=(1, 0, -1)) + data_sym = data.symmetry_expanded_copy + assert data != data_sym + + +def test_surface_field_time_data_symmetry_expanded_copy(): + """Test SurfaceFieldTimeData symmetry functionality.""" + + # no symmetry + data = make_surface_field_time_data() + data_sym = data.symmetry_expanded_copy + assert data == data_sym + + data = data.updated_copy(symmetry=(1, 0, -1)) + data_sym = data.symmetry_expanded_copy + assert data != data_sym diff --git a/tests/test_data/test_unstructured_pyvista.py b/tests/test_data/test_unstructured_pyvista.py index 681eb9da3f..068e8e4bc2 100644 --- a/tests/test_data/test_unstructured_pyvista.py +++ b/tests/test_data/test_unstructured_pyvista.py @@ -297,3 +297,30 @@ def test_surface_plot_windowed_parameter(windowed, no_pyvista=False): plotter = dataset.plot(windowed=windowed, show=False) assert isinstance(plotter, pv.Plotter) plotter.close() + + +def test_surface_plot_unsupported_plotting(no_pyvista=False): + """Test plotting of unsupported data (multiple field components or complex values).""" + from tidy3d.exceptions import DataError, Tidy3dImportError + + # test more than one field not supported + dataset = _create_vector_surface() + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(field=True, grid=True, show=False) + else: + _ = dataset.isel(axis=0).plot(field=True, grid=True, show=False) + with pytest.raises(DataError): + _ = dataset.plot(field=True, grid=True, show=False) + + # test plot 
with complex field not supported + dataset = _create_simple_surface() + dataset = dataset.updated_copy(values=dataset.values * (1 + 1j)) + + if no_pyvista: + with pytest.raises(Tidy3dImportError): + _ = dataset.plot(field=True, grid=True, show=False) + else: + with pytest.raises(DataError): + _ = dataset.plot(field=True, grid=True, show=False) diff --git a/tidy3d/__init__.py b/tidy3d/__init__.py index 10500366b4..1d67e3d651 100644 --- a/tidy3d/__init__.py +++ b/tidy3d/__init__.py @@ -99,7 +99,6 @@ from .components.apodization import ApodizationSpec -# boundary placement for other solvers # boundary placement for other solvers from .components.bc_placement import ( MediumMediumInterface, @@ -161,6 +160,8 @@ IndexedFieldTimeDataArray, IndexedFieldVoltageDataArray, IndexedFreqDataArray, + IndexedSurfaceFreqDataArray, + IndexedSurfaceTimeDataArray, IndexedTimeDataArray, IndexedVoltageDataArray, ModeAmpsDataArray, @@ -618,6 +619,8 @@ def set_logging_level(level: str) -> None: "IndexedFieldTimeDataArray", "IndexedFieldVoltageDataArray", "IndexedFreqDataArray", + "IndexedSurfaceFreqDataArray", + "IndexedSurfaceTimeDataArray", "IndexedTimeDataArray", "IndexedVoltageDataArray", "InsulatingBC", diff --git a/tidy3d/components/base_sim/data/monitor_data.py b/tidy3d/components/base_sim/data/monitor_data.py index 3b87dc3d8f..c8be2de3b1 100644 --- a/tidy3d/components/base_sim/data/monitor_data.py +++ b/tidy3d/components/base_sim/data/monitor_data.py @@ -51,26 +51,26 @@ class AbstractUnstructuredMonitorData(AbstractMonitorData, ABC): def _symmetry_expanded_copy_base( self, - property: Union[UnstructuredGridDatasetType, SpatialDataArray], + data: Union[UnstructuredGridDatasetType, SpatialDataArray], custom_symmetry: Optional[tuple[Union[Literal[-1, 1], XrDataArray], ...]] = None, ) -> Union[UnstructuredGridDatasetType, SpatialDataArray]: - """Return the property with symmetry applied.""" + """Return the data with symmetry applied.""" # no symmetry if all(sym == 0 for sym in self.symmetry): - return property + return data - new_property = copy.copy(property) + new_data = copy.copy(data) mnt_bounds = np.array(self.monitor.bounds) - if isinstance(new_property, SpatialDataArray): + if isinstance(new_data, SpatialDataArray): data_bounds = [ - [np.min(new_property.x), np.min(new_property.y), np.min(new_property.z)], - [np.max(new_property.x), np.max(new_property.y), np.max(new_property.z)], + [np.min(new_data.x), np.min(new_data.y), np.min(new_data.z)], + [np.max(new_data.x), np.max(new_data.y), np.max(new_data.z)], ] else: - data_bounds = new_property.bounds + data_bounds = new_data.bounds dims_need_clipping_left = [] dims_need_clipping_right = [] @@ -86,15 +86,13 @@ def _symmetry_expanded_copy_base( if mnt_bounds[1][dim] < data_bounds[0][dim]: # (note that mnt_bounds[0][dim] < 2 * center - data_bounds[0][dim] will be satisfied based on backend behavior) # simple reflection - new_property = new_property.reflect( + new_data = new_data.reflect( axis=dim, center=center, reflection_only=True, symmetry=symmetry_factor ) elif mnt_bounds[0][dim] < 2 * center - data_bounds[0][dim]: # expand only if monitor bounds missing data # if we do expand, simply reflect symmetrically the whole data - new_property = new_property.reflect( - axis=dim, center=center, symmetry=symmetry_factor - ) + new_data = new_data.reflect(axis=dim, center=center, symmetry=symmetry_factor) # if it turns out that we expanded too much, we will trim unnecessary data later if mnt_bounds[0][dim] > 2 * center - data_bounds[1][dim]: @@ -105,7 +103,7 @@ 
def _symmetry_expanded_copy_base( dims_need_clipping_right.append(dim) # trim over-expanded data - if len(dims_need_clipping_left) > 0 or len(dims_need_clipping_right) > 0: + if dims_need_clipping_left or dims_need_clipping_right: # enlarge clipping domain on positive side arbitrary by 1 # should not matter by how much clip_bounds = [mnt_bounds[0] - 1, mnt_bounds[1] + 1] @@ -115,9 +113,9 @@ def _symmetry_expanded_copy_base( for dim in dims_need_clipping_right: clip_bounds[1][dim] = mnt_bounds[1][dim] - if isinstance(new_property, SpatialDataArray): - new_property = new_property.sel_inside(clip_bounds) + if isinstance(new_data, SpatialDataArray): + new_data = new_data.sel_inside(clip_bounds) else: - new_property = new_property.box_clip(bounds=clip_bounds) + new_data = new_data.box_clip(bounds=clip_bounds) - return new_property + return new_data diff --git a/tidy3d/components/data/data_array.py b/tidy3d/components/data/data_array.py index 598418d875..50341b186e 100644 --- a/tidy3d/components/data/data_array.py +++ b/tidy3d/components/data/data_array.py @@ -1687,6 +1687,8 @@ def _make_impedance_data_array(result: DataArray) -> ImpedanceResultTypes: IndexedFieldDataArray, IndexedFieldTimeDataArray, IndexedFreqDataArray, + IndexedSurfaceFreqDataArray, + IndexedSurfaceTimeDataArray, ] DATA_ARRAY_MAP = {data_array.__name__: data_array for data_array in DATA_ARRAY_TYPES} diff --git a/tidy3d/components/data/dataset.py b/tidy3d/components/data/dataset.py index 0e08a69d7b..d4c877d5f4 100644 --- a/tidy3d/components/data/dataset.py +++ b/tidy3d/components/data/dataset.py @@ -529,15 +529,20 @@ def current_density(self) -> TriangularSurfaceDataset: h_diff = 0 template = None - # we assume that is data is None it means field is zero on that side (e.g. PEC) - H_inside = self.H.sel(side="inside", drop=True) if "inside" in self.H.side else None - H_outside = self.H.sel(side="outside", drop=True) if "outside" in self.H.side else None - if H_inside is not None: - h_diff += H_inside.values - template = H_inside - if H_outside is not None: - h_diff -= H_outside.values - template = H_outside + if self.H is not None: + # we assume that if data is None it means field is zero on that side (e.g. 
PEC) + H_inside = ( + self.H.sel(side="inside", drop=True) if "inside" in self.H.values.side else None + ) + H_outside = ( + self.H.sel(side="outside", drop=True) if "outside" in self.H.values.side else None + ) + if H_inside is not None: + h_diff = h_diff + H_inside.values + template = H_inside + if H_outside is not None: + h_diff = h_diff - H_outside.values + template = H_outside if template is None: raise ValueError( diff --git a/tidy3d/components/data/monitor_data.py b/tidy3d/components/data/monitor_data.py index 1b3b62777f..55ea85ff83 100644 --- a/tidy3d/components/data/monitor_data.py +++ b/tidy3d/components/data/monitor_data.py @@ -1590,23 +1590,33 @@ def symmetry_expanded_copy(self) -> AbstractFieldData: def _symmetry_update_dict(self) -> dict: """Dictionary of data fields to create data with expanded symmetry.""" + eigvals = self.symmetry_eigenvalues + h_symmetry = [ - xr.DataArray([1, -1, -1], coords={"axis": [0, 1, 2]}) * self.symmetry[0], - xr.DataArray([-1, 1, -1], coords={"axis": [0, 1, 2]}) * self.symmetry[1], - xr.DataArray([-1, -1, 1], coords={"axis": [0, 1, 2]}) * self.symmetry[2], + xr.DataArray( + [eigvals["Hx"](dim), eigvals["Hy"](dim), eigvals["Hz"](dim)], + coords={"axis": [0, 1, 2]}, + ) + * self.symmetry[dim] + for dim in range(3) ] e_symmetry = [ - xr.DataArray([-1, 1, 1], coords={"axis": [0, 1, 2]}) * self.symmetry[0], - xr.DataArray([1, -1, 1], coords={"axis": [0, 1, 2]}) * self.symmetry[1], - xr.DataArray([1, 1, -1], coords={"axis": [0, 1, 2]}) * self.symmetry[2], + xr.DataArray( + [eigvals["Ex"](dim), eigvals["Ey"](dim), eigvals["Ez"](dim)], + coords={"axis": [0, 1, 2]}, + ) + * self.symmetry[dim] + for dim in range(3) ] # normal field is always even under symmetry n_symmetry = [ - xr.DataArray([1, -1, -1], coords={"axis": [0, 1, 2]}), - xr.DataArray([-1, 1, -1], coords={"axis": [0, 1, 2]}), - xr.DataArray([-1, -1, 1], coords={"axis": [0, 1, 2]}), + xr.DataArray( + [eigvals["Hx"](dim), eigvals["Hy"](dim), eigvals["Hz"](dim)], + coords={"axis": [0, 1, 2]}, + ) + for dim in range(3) ] updated_dict = {} @@ -1659,10 +1669,7 @@ class SurfaceFieldData(ElectromagneticSurfaceFieldData): def poynting(self) -> TriangularSurfaceDataset: """Time-averaged Poynting vector for frequency-domain data.""" - if self.E is None or self.H is None: - raise ValueError( - "Could not calculate poynting: the dataset does not contain E or H field information." - ) + self._check_fields_stored(["E", "H"]) e_field = self.E h_field = self.H @@ -1716,15 +1723,12 @@ class SurfaceFieldTimeData(ElectromagneticSurfaceFieldData): def poynting(self) -> TriangularSurfaceDataset: """Poynting vector for time-domain data.""" - if self.E is None or self.H is None: - raise ValueError( - "Could not calculate poynting: the dataset does not contain E or H field information." 
- ) + self._check_fields_stored(["E", "H"]) e_field = self.E h_field = self.H poynting = e_field.updated_copy( - values=np.real(xr.cross(np.real(e_field.values), np.real(h_field.values), dim="axis")) + values=xr.cross(np.real(e_field.values), np.real(h_field.values), dim="axis") ) return poynting diff --git a/tidy3d/components/data/unstructured/base.py b/tidy3d/components/data/unstructured/base.py index 29ec3fb937..cf5cf7ba7b 100644 --- a/tidy3d/components/data/unstructured/base.py +++ b/tidy3d/components/data/unstructured/base.py @@ -232,7 +232,7 @@ def name(self) -> str: # we redirect name to values.name return self.values.name - def rename(self, name: str) -> UnstructuredGridDataset: + def rename(self, name: str) -> UnstructuredDataset: """Return a renamed array.""" return self.updated_copy(values=self.values.rename(name)) @@ -638,7 +638,7 @@ def from_vtk( remove_degenerate_cells: bool = False, remove_unused_points: bool = False, ignore_invalid_cells: bool = False, - ) -> UnstructuredGridDataset: + ) -> UnstructuredDataset: """Load unstructured data from a vtk file. Parameters @@ -656,7 +656,7 @@ def from_vtk( Returns ------- - UnstructuredGridDataset + UnstructuredDataset Unstructured data. """ grid = cls._read_vtkLegacyFile(file) @@ -773,7 +773,7 @@ def _get_values_from_vtk( return values def get_cell_values(self, **kwargs): - """This function returns the cell values for the fields stored in the UnstructuredGridDataset. + """This function returns the cell values for the fields stored in the UnstructuredDataset. If multiple fields are stored per point, like in an IndexedVoltageDataArray, cell values will be provided for each of the fields unless a selection argument is provided, e.g., voltage=0.2 Parameters @@ -792,7 +792,7 @@ def get_cell_values(self, **kwargs): @abstractmethod def get_cell_volumes(self): - """Get the volumes associated to each cell.""" + """Get the volumes/areas associated to each cell.""" """ Grid operations """ @@ -1029,7 +1029,6 @@ def reflect( """ Data selection """ - @requires_vtk def sel( self, x: Union[float, ArrayLike] = None, @@ -1037,7 +1036,7 @@ def sel( z: Union[float, ArrayLike] = None, method: Optional[Literal["None", "nearest", "pad", "ffill", "backfill", "bfill"]] = None, **sel_kwargs, - ) -> Union[UnstructuredGridDataset, XrDataArray]: + ) -> Union[UnstructuredDataset, XrDataArray]: """Extract/interpolate data along one or more spatial or non-spatial directions. Must provide at least one argument among 'x', 'y', 'z' or non-spatial dimensions through additional arguments. Along spatial dimensions a suitable slicing of grid is applied (plane slice, line slice, or interpolation). Selection along non-spatial dimensions is forwarded to @@ -1118,10 +1117,6 @@ def isel( return self.updated_copy(values=self.values.isel(**sel_kwargs_only_lists)) # return self.updated_copy(values=self.values.isel(**sel_kwargs)) - -class UnstructuredGridDataset(UnstructuredDataset, np.lib.mixins.NDArrayOperatorsMixin, ABC): - """Abstract base for datasets that store unstructured grid data.""" - """ Interpolation """ def interp( @@ -1150,10 +1145,10 @@ def interp( y-coordinates of sampling points. z : Union[float, ArrayLike] = None z-coordinates of sampling points. - fill_value : Union[float, Literal["extrapolate"]] = 0 + fill_value : Union[float, Literal["extrapolate"]] = None Value to use when filling points without interpolated values. If ``"extrapolate"`` then - nearest values are used. Note: in a future version the default value will be changed - to ``"extrapolate"``. 
+ nearest values are used. If None, defaults to 0 for backward compatibility (will change + to ``"extrapolate"`` in a future version). use_vtk : bool = False Use vtk's interpolation functionality or Tidy3D's own implementation. Note: this option will be removed in a future version. @@ -1170,7 +1165,7 @@ def interp( rel_tol : float = 1e-6 Relative tolerance when determining whether a point belongs to a cell. **coords_kwargs : dict - Keyword arguments to pass to the xarray interp() function. + Keyword arguments specifying non-spatial coordinates for interpolation (e.g., t=[0, 1, 2]). Returns ------- @@ -1178,16 +1173,19 @@ def interp( Interpolated data. """ - if fill_value is None: - log.warning( - "Default parameter setting 'fill_value=0' will be changed to " - "'fill_value=``extrapolate``' in a future version." - ) - fill_value = 0 - spatial_dims_given = any(comp is not None for comp in [x, y, z]) - if spatial_dims_given and any(comp is None for comp in [x, y, z]): - raise DataError("Must provide either all or none of 'x', 'y', and 'z'") + + if spatial_dims_given: + if any(comp is None for comp in [x, y, z]): + raise DataError("Must provide either all or none of 'x', 'y', and 'z'") + + # For spatial interpolation, use legacy default of 0 if not specified + if fill_value is None: + log.warning( + "Default parameter setting 'fill_value=0' will be changed to " + "'fill_value=``extrapolate``' in a future version." + ) + fill_value = 0 if not spatial_dims_given and len(coords_kwargs) == 0: raise DataError( @@ -1215,37 +1213,99 @@ def interp( return result - def _non_spatial_interp(self, method="linear", fill_value=np.nan, **coords_kwargs): + def _non_spatial_interp(self, method="linear", fill_value=None, **coords_kwargs): """Interpolate data at non-spatial dimensions using xarray's interp() function. Parameters ---------- method: Literal["linear", "nearest"] = "linear" Interpolation method to use. - fill_value : Union[float, Literal["extrapolate"]] = 0 + fill_value : Union[float, Literal["extrapolate"]] = None Value to use when filling points without interpolated values. If ``"extrapolate"`` then - nearest values are used. Note: in a future version the default value will be changed - to ``"extrapolate"``. + nearest values are used. **coords_kwargs : dict Keyword arguments to pass to the xarray interp() function. Returns ------- - xarray.DataArray - Interpolated data. + UnstructuredDataset + Dataset with interpolated values. """ + if fill_value is None: + fill_value = np.nan if method == "linear" else "extrapolate" + coords_kwargs_only_lists = { key: value if isinstance(value, list) else [value] for key, value in coords_kwargs.items() } + + interp_kwargs = {"method": method} + if fill_value != "extrapolate": + interp_kwargs["kwargs"] = {"fill_value": fill_value} + else: + interp_kwargs["kwargs"] = {"fill_value": "extrapolate"} + return self.updated_copy( - values=self.values.interp( - **coords_kwargs_only_lists, - method="linear", - kwargs={"fill_value": fill_value}, - ) + values=self.values.interp(**coords_kwargs_only_lists, **interp_kwargs) ) + @abstractmethod + def _spatial_interp( + self, + x: Union[float, ArrayLike], + y: Union[float, ArrayLike], + z: Union[float, ArrayLike], + fill_value: Optional[ + Union[float, Literal["extrapolate"]] + ] = None, # TODO: an array if multiple fields? 
+ use_vtk: bool = False, + method: Literal["linear", "nearest"] = "linear", + max_samples_per_step: int = DEFAULT_MAX_SAMPLES_PER_STEP, + max_cells_per_step: int = DEFAULT_MAX_CELLS_PER_STEP, + rel_tol: float = DEFAULT_TOLERANCE_CELL_FINDING, + ) -> XrDataArray: + """Interpolate data along spatial dimensions at provided x, y, and z. + + Parameters + ---------- + x : Union[float, ArrayLike] + x-coordinates of sampling points. + y : Union[float, ArrayLike] + y-coordinates of sampling points. + z : Union[float, ArrayLike] + z-coordinates of sampling points. + fill_value : Union[float, Literal["extrapolate"]] = 0 + Value to use when filling points without interpolated values. If ``"extrapolate"`` then + nearest values are used. Note: in a future version the default value will be changed + to ``"extrapolate"``. + use_vtk : bool = False + Use vtk's interpolation functionality or Tidy3D's own implementation. Note: this + option will be removed in a future version. + method: Literal["linear", "nearest"] = "linear" + Interpolation method to use. + max_samples_per_step : int = 1e4 + Max number of points to interpolate at per iteration (used only if `use_vtk=False`). + Using a higher number may speed up calculations but, at the same time, it increases + RAM usage. + max_cells_per_step : int = 1e4 + Max number of cells to interpolate from per iteration (used only if `use_vtk=False`). + Using a higher number may speed up calculations but, at the same time, it increases + RAM usage. + rel_tol : float = 1e-6 + Relative tolerance when determining whether a point belongs to a cell. + + Returns + ------- + xarray.DataArray + Interpolated data. + """ + + +class UnstructuredGridDataset(UnstructuredDataset, np.lib.mixins.NDArrayOperatorsMixin, ABC): + """Abstract base for datasets that store unstructured grid data.""" + + """ Interpolation """ + def _spatial_interp( self, x: Union[float, ArrayLike], diff --git a/tidy3d/components/data/unstructured/surface.py b/tidy3d/components/data/unstructured/surface.py index 7338d67567..2d87408493 100644 --- a/tidy3d/components/data/unstructured/surface.py +++ b/tidy3d/components/data/unstructured/surface.py @@ -20,6 +20,9 @@ from tidy3d.packaging import pyvista, requires_pyvista, requires_vtk, vtk from .base import ( + DEFAULT_MAX_CELLS_PER_STEP, + DEFAULT_MAX_SAMPLES_PER_STEP, + DEFAULT_TOLERANCE_CELL_FINDING, UnstructuredDataset, ) @@ -145,9 +148,28 @@ def plane_slice(self, axis: Axis, pos: float) -> XrDataArray: raise Tidy3dNotImplementedError("Slicing of unstructured surfaces is not implemented yet.") + """ Interpolation """ + + def _spatial_interp( + self, + x: Union[float, ArrayLike], + y: Union[float, ArrayLike], + z: Union[float, ArrayLike], + fill_value: Optional[Union[float, Literal["extrapolate"]]] = None, + use_vtk: bool = False, + method: Literal["linear", "nearest"] = "linear", + ignore_normal_pos: bool = True, + max_samples_per_step: int = DEFAULT_MAX_SAMPLES_PER_STEP, + max_cells_per_step: int = DEFAULT_MAX_CELLS_PER_STEP, + rel_tol: float = DEFAULT_TOLERANCE_CELL_FINDING, + ) -> XrDataArray: + """Interpolate data along spatial dimensions at provided x, y, and z.""" + raise Tidy3dNotImplementedError( + "Spatial interpolation from unstructured surfaces is not implemented yet." 
+        )
+
     """ Data selection """
 
-    @requires_vtk
     def sel(
         self,
         x: Union[float, ArrayLike] = None,
diff --git a/tidy3d/components/data/unstructured/tetrahedral.py b/tidy3d/components/data/unstructured/tetrahedral.py
index f7f82a93f3..bd7c613e65 100644
--- a/tidy3d/components/data/unstructured/tetrahedral.py
+++ b/tidy3d/components/data/unstructured/tetrahedral.py
@@ -229,7 +229,6 @@ def _interp_py(
 
     """ Data selection """
 
-    @requires_vtk
     def sel(
         self,
         x: Union[float, ArrayLike] = None,
diff --git a/tidy3d/components/monitor.py b/tidy3d/components/monitor.py
index cfb1f2d159..8048fa5a88 100644
--- a/tidy3d/components/monitor.py
+++ b/tidy3d/components/monitor.py
@@ -33,7 +33,12 @@
     ObsGridArray,
     Size,
 )
-from .validators import assert_plane, validate_freqs_min, validate_freqs_not_empty
+from .validators import (
+    assert_plane,
+    assert_volumetric,
+    validate_freqs_min,
+    validate_freqs_not_empty,
+)
 from .viz import ARROW_ALPHA, ARROW_COLOR_MONITOR
 
 BYTES_REAL = 4
@@ -1600,6 +1605,8 @@ class AbstractSurfaceMonitor(Monitor, ABC):
         description="For surface monitors fields are always colocated on surface.",
     )
 
+    _check_volumetric = assert_volumetric()
+
     @pydantic.validator("fields", always=True)
     def _warn_beta_stage(cls, val, values):
         """Warn that surface monitors are in beta stage."""
diff --git a/tidy3d/components/simulation.py b/tidy3d/components/simulation.py
index c1477cb739..afb0727fa5 100644
--- a/tidy3d/components/simulation.py
+++ b/tidy3d/components/simulation.py
@@ -3977,7 +3977,7 @@ def _get_surface_monitor_bounds_self(self, monitor: SurfaceMonitorType) -> list[
         return bounds
 
     @pydantic.validator("monitors", always=True)
-    @skip_if_fields_missing(["medium", "structures", "size", "medium"])
+    @skip_if_fields_missing(["medium", "structures", "size"])
     def error_empty_surface_monitor(cls, val, values):
         """Error if any surface monitor does not at least cross a bounding box of a PEC/LossyMetal structure."""
         monitors = val
diff --git a/tidy3d/components/tcad/data/monitor_data/abstract.py b/tidy3d/components/tcad/data/monitor_data/abstract.py
index 687347b216..a8b17c83a9 100644
--- a/tidy3d/components/tcad/data/monitor_data/abstract.py
+++ b/tidy3d/components/tcad/data/monitor_data/abstract.py
@@ -60,7 +60,7 @@ def symmetry_expanded_copy(self) -> HeatChargeMonitorData:
 
         new_field_components = {}
         for field, val in self.field_components.items():
-            new_field_components[field] = self._symmetry_expanded_copy_base(property=val)
+            new_field_components[field] = self._symmetry_expanded_copy_base(data=val)
 
         return self.updated_copy(symmetry=(0, 0, 0), **new_field_components)
 
diff --git a/tidy3d/components/viz/__init__.py b/tidy3d/components/viz/__init__.py
index 43a2872b68..73c86e4f12 100644
--- a/tidy3d/components/viz/__init__.py
+++ b/tidy3d/components/viz/__init__.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 from .axes_utils import (
-    add_ax_3d_if_none,
     add_ax_if_none,
     add_plotter_if_none,
     equal_aspect,
@@ -69,7 +68,6 @@
     "PlotParams",
     "Polygon",
     "VisualizationSpec",
-    "add_ax_3d_if_none",
     "add_ax_if_none",
     "add_plotter_if_none",
     "arrow_style",
diff --git a/tidy3d/components/viz/axes_utils.py b/tidy3d/components/viz/axes_utils.py
index 2c306e229b..e2f84c2d4a 100644
--- a/tidy3d/components/viz/axes_utils.py
+++ b/tidy3d/components/viz/axes_utils.py
@@ -106,14 +106,6 @@ def make_ax() -> Ax:
     return ax
 
 
-def make_ax_3d() -> Ax:
-    """makes an empty ``ax`` with 3d projection."""
-    import matplotlib.pyplot as plt
-
-    _, ax = plt.subplots(1, 1, tight_layout=True, subplot_kw={"projection": "3d"})
-    return ax
- - def add_ax_if_none(plot): """Decorates ``plot(*args, **kwargs, ax=None)`` function. if ax=None in the function call, creates an ax and feeds it to rest of function. @@ -130,22 +122,6 @@ def _plot(*args, **kwargs) -> Ax: return _plot -def add_ax_3d_if_none(plot): - """Decorates ``plot(*args, **kwargs, ax=None)`` function. - if ax=None in the function call, creates an ax with 3d projection and feeds it to rest of function. - """ - - @wraps(plot) - def _plot(*args, **kwargs) -> Ax: - """New plot function using a generated ax if None.""" - if kwargs.get("ax") is None: - ax = make_ax_3d() - kwargs["ax"] = ax - return plot(*args, **kwargs) - - return _plot - - def equal_aspect(plot): """Decorates a plotting function returning a matplotlib axes. Ensures the aspect ratio of the returned axes is set to equal. From 452e987515f10b7f17f23b6454fe8a93f8982606 Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Wed, 15 Oct 2025 22:41:58 -0700 Subject: [PATCH 6/7] bug in symmetry expansion --- tidy3d/components/data/monitor_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tidy3d/components/data/monitor_data.py b/tidy3d/components/data/monitor_data.py index 55ea85ff83..1827332804 100644 --- a/tidy3d/components/data/monitor_data.py +++ b/tidy3d/components/data/monitor_data.py @@ -1613,7 +1613,7 @@ def _symmetry_update_dict(self) -> dict: # normal field is always even under symmetry n_symmetry = [ xr.DataArray( - [eigvals["Hx"](dim), eigvals["Hy"](dim), eigvals["Hz"](dim)], + [eigvals["Ex"](dim), eigvals["Ey"](dim), eigvals["Ez"](dim)], coords={"axis": [0, 1, 2]}, ) for dim in range(3) From 23a172cd2616f69d3bce81c87accc3f23e57eeed Mon Sep 17 00:00:00 2001 From: dbochkov-flexcompute Date: Fri, 17 Oct 2025 17:27:46 -0700 Subject: [PATCH 7/7] first round of comments --- tests/test_components/test_monitor.py | 21 ++- tests/test_data/test_data_arrays.py | 47 +++---- tests/test_data/test_monitor_data.py | 129 +++++++----------- tests/test_data/test_unstructured_pyvista.py | 4 + tidy3d/components/data/unstructured/base.py | 46 ++++--- .../components/data/unstructured/surface.py | 6 +- .../data/unstructured/tetrahedral.py | 2 +- .../data/unstructured/triangular.py | 13 +- tidy3d/components/material/multi_physics.py | 1 + tidy3d/components/medium.py | 27 ++-- tidy3d/components/monitor.py | 4 +- tidy3d/components/simulation.py | 23 +++- tidy3d/components/viz/axes_utils.py | 41 +++++- 13 files changed, 201 insertions(+), 163 deletions(-) diff --git a/tests/test_components/test_monitor.py b/tests/test_components/test_monitor.py index 255558353b..1436d94023 100644 --- a/tests/test_components/test_monitor.py +++ b/tests/test_components/test_monitor.py @@ -471,7 +471,10 @@ def test_surface_monitors(): ) # monitor doesn't overlap any pec structure - with pytest.raises(pydantic.ValidationError): + with pytest.raises( + pydantic.ValidationError, + match="Surface monitor surface does not cross any PEC or LossyMetalMedium structures.", + ): surf_mnt = td.SurfaceFieldMonitor( size=(0.2, 1, 1), center=(0.8, 0, 0), freqs=[td.C_0], name="surface" ) @@ -485,5 +488,19 @@ def test_surface_monitors(): ) # monitor must be volumetric - with pytest.raises(pydantic.ValidationError): + with pytest.raises(pydantic.ValidationError, match="must be volumetric"): surf_mnt = td.SurfaceFieldMonitor(size=(1, 0, 1), freqs=[td.C_0], name="surface") + + # surface monitors are not allowed in 2D simulations + with pytest.raises( + pydantic.ValidationError, + match="Simulation domain has size zero along at least one 
dimension; surface monitors are not allowed in this case.", + ): + surf_mnt = td.SurfaceFieldMonitor(size=(1, 1, 1), freqs=[td.C_0], name="surface") + _ = td.Simulation( + size=(1, 1, 0), + structures=[pec_sphere], + monitors=[surf_mnt], + run_time=1e-12, + grid_spec=td.GridSpec.auto(wavelength=1), + ) diff --git a/tests/test_data/test_data_arrays.py b/tests/test_data/test_data_arrays.py index 325f2abd73..6ff11aeaba 100644 --- a/tests/test_data/test_data_arrays.py +++ b/tests/test_data/test_data_arrays.py @@ -106,8 +106,6 @@ FLUX_TIME_MONITOR, DIFFRACTION_MONITOR, DIRECTIVITY_MONITOR, - # SURFACE_FIELD_MONITOR, - # SURFACE_FIELD_TIME_MONITOR, ] GRID_SPEC = td.GridSpec(wavelength=2.0) @@ -251,31 +249,28 @@ def make_surface_triangular_dataset(): return points, cells -def make_surface_field_data_array(): - """Create surface field data array for testing.""" +def make_surface_field_data_array(time: bool = False): + """Create surface field (time or freq) data array for testing.""" points, cells = make_surface_triangular_dataset() - - # Create field values on both sides of surface for frequency domain - values = (1 + 1j) * np.random.random((3, 2, 3, len(FREQS))) # index, side, axis, f - field_values = td.IndexedFieldDataArray( - values, - coords={"index": [0, 1, 2], "side": ["outside", "inside"], "axis": [0, 1, 2], "f": FREQS}, - ) - - return td.TriangularSurfaceDataset(points=points, cells=cells, values=field_values) - - -def make_surface_field_time_data_array(): - """Create surface field time data array for testing.""" - points, cells = make_surface_triangular_dataset() - - # Create field values on both sides of surface for time domain - values = (1 + 1j) * np.random.random((3, 2, 3, len(TS))) # index, side, axis, t - field_values = td.IndexedFieldTimeDataArray( - values, - coords={"index": [0, 1, 2], "side": ["outside", "inside"], "axis": [0, 1, 2], "t": TS}, - ) - + if time: + # Create field values on both sides of surface for time domain + values = (1 + 1j) * np.random.random((3, 2, 3, len(TS))) # index, side, axis, t + field_values = td.IndexedFieldTimeDataArray( + values, + coords={"index": [0, 1, 2], "side": ["outside", "inside"], "axis": [0, 1, 2], "t": TS}, + ) + else: + # Create field values on both sides of surface for frequency domain + values = (1 + 1j) * np.random.random((3, 2, 3, len(FREQS))) # index, side, axis, f + field_values = td.IndexedFieldDataArray( + values, + coords={ + "index": [0, 1, 2], + "side": ["outside", "inside"], + "axis": [0, 1, 2], + "f": FREQS, + }, + ) return td.TriangularSurfaceDataset(points=points, cells=cells, values=field_values) diff --git a/tests/test_data/test_monitor_data.py b/tests/test_data/test_monitor_data.py index 713221d5ea..ab1ec37bfa 100644 --- a/tests/test_data/test_monitor_data.py +++ b/tests/test_data/test_monitor_data.py @@ -61,7 +61,6 @@ make_scalar_mode_field_data_array, make_scalar_mode_field_data_array_smooth, make_surface_field_data_array, - make_surface_field_time_data_array, make_surface_normal_data_array, ) @@ -303,28 +302,33 @@ def make_diffraction_data(): ) -def make_surface_field_data(): - """Create a SurfaceFieldData instance for testing.""" - normal = make_surface_normal_data_array() - - # Create field datasets for E and H components - field_datasets = {} - for field in SURFACE_FIELD_MONITOR.fields: - field_datasets[field] = make_surface_field_data_array() +def make_surface_field_data(time: bool = False): + """Create a SurfaceFieldData or SurfaceFieldTimeData instance for testing. 
- return SurfaceFieldData(monitor=SURFACE_FIELD_MONITOR, normal=normal, **field_datasets) + Parameters + ---------- + time : bool + If True, create SurfaceFieldTimeData. Otherwise, create SurfaceFieldData. - -def make_surface_field_time_data(): - """Create a SurfaceFieldTimeData instance for testing.""" + Returns + ------- + SurfaceFieldData or SurfaceFieldTimeData + The created data object. + """ normal = make_surface_normal_data_array() - # Create field datasets for E and H components + if time: + monitor = SURFACE_FIELD_TIME_MONITOR + cls = SurfaceFieldTimeData + else: + monitor = SURFACE_FIELD_MONITOR + cls = SurfaceFieldData + field_datasets = {} - for field in SURFACE_FIELD_TIME_MONITOR.fields: - field_datasets[field] = make_surface_field_time_data_array() + for field in monitor.fields: + field_datasets[field] = make_surface_field_data_array(time=time) - return SurfaceFieldTimeData(monitor=SURFACE_FIELD_TIME_MONITOR, normal=normal, **field_datasets) + return cls(monitor=monitor, normal=normal, **field_datasets) """ Test them out """ @@ -1159,12 +1163,17 @@ def test_from_zbf_dimsfail(self, tmp_path, field_data, dim1, dim2): _ = td.FieldDataset.from_zbf(filename=zbf_filename, dim1=dim1, dim2=dim2) -def test_surface_field_data(): - """Test SurfaceFieldData functionality.""" - data = make_surface_field_data() +import pytest + + +@pytest.mark.parametrize("time", [False, True]) +def test_surface_field_data_unified(time): + """Test SurfaceFieldData and SurfaceFieldTimeData functionality.""" + data = make_surface_field_data(time=time) + monitor = data.monitor # Test field component access - for field in SURFACE_FIELD_MONITOR.fields: + for field in monitor.fields: field_data = getattr(data, field) assert field_data is not None assert hasattr(field_data, "values") @@ -1177,19 +1186,7 @@ def test_surface_field_data(): poynting = data.poynting assert poynting is not None assert hasattr(poynting, "values") - - # Test normalization - def dummy_source_spectrum(freq): - return 1.0 + 0.1j * np.ones_like(freq) - - normalized_data = data.normalize(dummy_source_spectrum) - assert isinstance(normalized_data, SurfaceFieldData) - - # Verify that the field components are different after normalization - for field_name, original_field in data.field_components.items(): - normalized_field = getattr(normalized_data, field_name) - # Should not be exactly equal due to normalization - assert not np.allclose(original_field.values, normalized_field.values) + assert np.all(np.isreal(poynting.values.values)) # test intensity calculation intensity = data.intensity @@ -1197,33 +1194,19 @@ def dummy_source_spectrum(freq): assert hasattr(intensity, "values") assert np.all(np.isreal(intensity.values.values)) + if not time: + # Test normalization (only for freq domain variant, SurfaceFieldData) + def dummy_source_spectrum(freq): + return 1.0 + 0.1j * np.ones_like(freq) -def test_surface_field_time_data(): - """Test SurfaceFieldTimeData functionality.""" - data = make_surface_field_time_data() + normalized_data = data.normalize(dummy_source_spectrum) + assert isinstance(normalized_data, SurfaceFieldData) - # Test field component access - for field in SURFACE_FIELD_TIME_MONITOR.fields: - field_data = getattr(data, field) - assert field_data is not None - assert hasattr(field_data, "values") - - # Test normal vector access - assert data.normal is not None - assert hasattr(data.normal, "values") - - # Test Poynting vector calculation for time domain - poynting = data.poynting - assert poynting is not None - assert 
hasattr(poynting, "values") - # Time domain Poynting should be real (no complex conjugate) - assert np.all(np.isreal(poynting.values.values)) - - # test intensity calculation - intensity = data.intensity - assert intensity is not None - assert hasattr(intensity, "values") - assert np.all(np.isreal(intensity.values.values)) + # Verify that the field components are different after normalization + for field_name, original_field in data.field_components.items(): + normalized_field = getattr(normalized_data, field_name) + # Should not be exactly equal due to normalization + assert not np.allclose(original_field.values, normalized_field.values) def test_surface_field_data_missing_fields(): @@ -1234,7 +1217,7 @@ def test_surface_field_data_missing_fields(): surface_data_partial = SurfaceFieldData( monitor=SURFACE_FIELD_MONITOR.updated_copy(fields=("E",)), normal=normal, - E=make_surface_field_data_array(), + E=make_surface_field_data_array(time=False), H=None, ) @@ -1255,7 +1238,7 @@ def test_surface_field_time_data_missing_fields(): surface_data_partial = SurfaceFieldTimeData( monitor=SURFACE_FIELD_TIME_MONITOR.updated_copy(fields=("H",)), normal=normal, - H=make_surface_field_time_data_array(), + H=make_surface_field_data_array(time=True), E=None, ) @@ -1269,27 +1252,15 @@ def test_surface_field_time_data_missing_fields(): assert hasattr(current_density, "values") -def test_surface_field_data_symmetry_expanded_copy(): - """Test SurfaceFieldData symmetry functionality.""" - - # no symmetry - data = make_surface_field_data() - data_sym = data.symmetry_expanded_copy - assert data == data_sym - - data = data.updated_copy(symmetry=(1, 0, -1)) - data_sym = data.symmetry_expanded_copy - assert data != data_sym - - -def test_surface_field_time_data_symmetry_expanded_copy(): - """Test SurfaceFieldData symmetry functionality.""" +@pytest.mark.parametrize("time", [False, True]) +def test_surface_field_data_symmetry_expanded_copy_parametrized(time): + """Test symmetry_expanded_copy functionality for SurfaceFieldData and SurfaceFieldTimeData.""" # no symmetry - data = make_surface_field_time_data() + data = make_surface_field_data(time=time) data_sym = data.symmetry_expanded_copy - assert data == data_sym + assert data == data_sym, f"Failed for time={time} with no symmetry" data = data.updated_copy(symmetry=(1, 0, -1)) data_sym = data.symmetry_expanded_copy - assert data != data_sym + assert data != data_sym, f"Failed for time={time} with symmetry (1, 0, -1)" diff --git a/tests/test_data/test_unstructured_pyvista.py b/tests/test_data/test_unstructured_pyvista.py index 068e8e4bc2..07b0b25872 100644 --- a/tests/test_data/test_unstructured_pyvista.py +++ b/tests/test_data/test_unstructured_pyvista.py @@ -187,8 +187,12 @@ def test_surface_plot_plotter_reuse(no_pyvista=False): # Add second surface with different styling result2 = dataset.plot(plotter=plotter, opacity=0.5, show=False) + # Add third surface with different styling and positional plotter + result3 = dataset.plot(plotter, opacity=0.5, show=False) + assert result1 is plotter, "Should return same plotter" assert result2 is plotter, "Should return same plotter" + assert result3 is plotter, "Should return same plotter" plotter.close() diff --git a/tidy3d/components/data/unstructured/base.py b/tidy3d/components/data/unstructured/base.py index cf5cf7ba7b..5df334eeb9 100644 --- a/tidy3d/components/data/unstructured/base.py +++ b/tidy3d/components/data/unstructured/base.py @@ -252,21 +252,26 @@ def is_uniform(self): return self.values.is_uniform 
     @cached_property
-    def _values_coords_dict(self):
+    def _non_spatial_coords_dict(self):
         """Non-spatial dimensions are corresponding coordinate values of stored data."""
         coord_dict = {dim: self.values.coords[dim].data for dim in self.values.dims}
         _ = coord_dict.pop("index")
         return coord_dict
 
     @cached_property
-    def _fields_shape(self):
-        """Shape in which fields are stored."""
-        return [len(coord) for coord in self._values_coords_dict.values()]
+    def _non_spatial_dims(self):
+        """Names of the non-spatial dimensions of the stored data."""
+        return [dim for dim in self.values.dims if dim != "index"]
+
+    @cached_property
+    def _non_spatial_shape(self):
+        """Shape in which fields are stored at each point."""
+        return [len(coord) for coord in self._non_spatial_coords_dict.values()]
 
     @cached_property
     def _num_fields(self):
         """Total number of stored fields."""
-        return 1 if len(self._fields_shape) == 0 else np.prod(self._fields_shape)
+        return 1 if len(self._non_spatial_shape) == 0 else np.prod(self._non_spatial_shape)
 
     @cached_property
     def _values_type(self):
@@ -474,7 +479,7 @@ def _vtk_obj(self):
         else:
             data_values = self.values.values
 
-        if len(self._fields_shape) > 0:
+        if len(self._non_spatial_shape) > 0:
             data_values = data_values.reshape(
                 (len(self.points.values), (1 + self.is_complex) * self._num_fields)
             )
@@ -583,7 +588,7 @@ def _from_vtk_obj_internal(
         values_type. We also turn on by default cleaning of geometry."""
         return self._from_vtk_obj(
             vtk_obj=vtk_obj,
-            field=self._values_coords_dict,
+            field=self._non_spatial_coords_dict,
             remove_degenerate_cells=remove_degenerate_cells,
             remove_unused_points=remove_unused_points,
             values_type=self._values_type,
@@ -1085,7 +1090,8 @@ def _non_spatial_sel(
         # convert individual values into lists of length 1
         # so that xarray doesn't drop the corresponding dimension
         sel_kwargs_only_lists = {
-            key: value if isinstance(value, list) else [value] for key, value in sel_kwargs.items()
+            key: value if isinstance(value, list) or key not in self._non_spatial_dims else [value]
+            for key, value in sel_kwargs.items()
         }
 
         return self.updated_copy(values=self.values.sel(**sel_kwargs_only_lists, method=method))
@@ -1112,10 +1118,10 @@ def isel(
         # convert individual values into lists of length 1
         # so that xarray doesn't drop the corresponding dimension
         sel_kwargs_only_lists = {
-            key: value if isinstance(value, list) else [value] for key, value in sel_kwargs.items()
+            key: value if isinstance(value, list) or key not in self._non_spatial_dims else [value]
+            for key, value in sel_kwargs.items()
         }
 
         return self.updated_copy(values=self.values.isel(**sel_kwargs_only_lists))
-        # return self.updated_copy(values=self.values.isel(**sel_kwargs))
 
     """ Interpolation """
 
@@ -1372,7 +1378,7 @@ def _spatial_interp(
         if use_vtk:
             if self.is_complex:
                 raise DataError("Option 'use_vtk=True' is not supported for complex datasets.")
-            if len(self._fields_shape) > 0:
+            if len(self._non_spatial_shape) > 0:
                 raise DataError(
                     "Option 'use_vtk=True' is not supported for multidimensional datasets."
) @@ -1395,9 +1401,9 @@ def _spatial_interp( ) coords_dict = {"x": x, "y": y, "z": z} - coords_dict.update(self._values_coords_dict) + coords_dict.update(self._non_spatial_coords_dict) - if len(self._values_coords_dict) == 0: + if len(self._non_spatial_coords_dict) == 0: return SpatialDataArray(interpolated_values, coords=coords_dict, name=self.values.name) else: return XrDataArray(interpolated_values, coords=coords_dict, name=self.values.name) @@ -1658,7 +1664,8 @@ def _interp_py_general( # let's allocate an array for resulting values # every time we process a chunk of samples, we will write into this array interpolated_values = fill_value + np.zeros( - [len(xyz_comp) for xyz_comp in xyz_grid] + self._fields_shape, dtype=self.values.dtype + [len(xyz_comp) for xyz_comp in xyz_grid] + self._non_spatial_shape, + dtype=self.values.dtype, ) processed_cells_global = 0 @@ -1763,7 +1770,7 @@ def _interp_py_general( # in case of 2d grid broadcast results along normal direction assuming translational # invariance if num_dims == 2: - orig_shape = [len(x), len(y), len(z), *self._fields_shape] + orig_shape = [len(x), len(y), len(z), *self._non_spatial_shape] flat_shape = orig_shape.copy() flat_shape[axis_ignore] = 1 interpolated_values = np.reshape(interpolated_values, flat_shape) @@ -1945,7 +1952,9 @@ def _interp_py_chunk( # interpolated_value = value0 * face0_sdf / dist0_sdf + ... # (because face0_sdf / dist0_sdf is linear shape function for vertex0) sdf = -inf * np.ones(num_samples_total) - interpolated = np.zeros([num_samples_total, *self._fields_shape], dtype=self._double_type) + interpolated = np.zeros( + [num_samples_total, *self._non_spatial_shape], dtype=self._double_type + ) # coordinates of each sample point sample_xyz = np.zeros((num_samples_total, num_dims)) @@ -1981,9 +1990,10 @@ def _interp_py_chunk( tmp = self._double_type( data_values.sel(index=cell_connections[step_cell_map, face_ind]).data ) - tmp *= np.reshape(d, [num_samples_total] + [1] * len(self._fields_shape)) + tmp *= np.reshape(d, [num_samples_total] + [1] * len(self._non_spatial_shape)) tmp /= np.reshape( - dist[face_ind, step_cell_map], [num_samples_total] + [1] * len(self._fields_shape) + dist[face_ind, step_cell_map], + [num_samples_total] + [1] * len(self._non_spatial_shape), ) # ignore degenerate cells diff --git a/tidy3d/components/data/unstructured/surface.py b/tidy3d/components/data/unstructured/surface.py index 2d87408493..65855853ac 100644 --- a/tidy3d/components/data/unstructured/surface.py +++ b/tidy3d/components/data/unstructured/surface.py @@ -297,7 +297,7 @@ def plot( raise DataError( "Unstructured dataset contains more than 1 field. " "Use '.sel()' to select a single field from available dimensions " - f"{self._values_coords_dict} before plotting." + f"{self._non_spatial_coords_dict} before plotting." ) # Check for complex values @@ -321,7 +321,7 @@ def plot( if field: # Add scalar values to the mesh data_values = self.values.values - if len(self._fields_shape) > 0: + if len(self._non_spatial_shape) > 0: data_values = data_values.reshape(len(self.points.values), self._num_fields) mesh[values_name] = data_values scalars = values_name @@ -421,7 +421,7 @@ def quiver( raise DataError( "Unstructured dataset must contain exactly 3 fields for quiver plotting. " "Use '.sel()' to select fields from available dimensions " - f"{self._values_coords_dict} before plotting." + f"{self._non_spatial_coords_dict} before plotting." 
) # Extract downsampled points diff --git a/tidy3d/components/data/unstructured/tetrahedral.py b/tidy3d/components/data/unstructured/tetrahedral.py index bd7c613e65..73acd2e743 100644 --- a/tidy3d/components/data/unstructured/tetrahedral.py +++ b/tidy3d/components/data/unstructured/tetrahedral.py @@ -110,7 +110,7 @@ def plane_slice(self, axis: Axis, pos: float) -> TriangularGridDataset: slice_vtk, remove_degenerate_cells=True, remove_unused_points=True, - field=self._values_coords_dict, + field=self._non_spatial_coords_dict, values_type=self._values_type, expect_complex=self.is_complex, ) diff --git a/tidy3d/components/data/unstructured/triangular.py b/tidy3d/components/data/unstructured/triangular.py index 2b7c87341b..bd4bbef98b 100644 --- a/tidy3d/components/data/unstructured/triangular.py +++ b/tidy3d/components/data/unstructured/triangular.py @@ -247,7 +247,7 @@ def plane_slice(self, axis: Axis, pos: float) -> XrDataArray: values = self._get_values_from_vtk( slice_vtk, len(points_numpy), - field=self._values_coords_dict, + field=self._non_spatial_coords_dict, values_type=self._values_type, expect_complex=self.is_complex, ) @@ -261,7 +261,7 @@ def plane_slice(self, axis: Axis, pos: float) -> XrDataArray: coords[self.normal_axis] = [self.normal_pos] coords[slice_axis] = points_numpy[:, slice_axis] coords_dict = dict(zip("xyz", coords)) - coords_dict.update(self._values_coords_dict) + coords_dict.update(self._non_spatial_coords_dict) # reshape values from a 1d array into a 3d array new_shape = [1, 1, 1] @@ -393,13 +393,14 @@ def _spatial_interp( max_cells_per_step=max_cells_per_step, ) interp_broadcasted = np.broadcast_to( - interp_inplane, [len(np.atleast_1d(comp)) for comp in [x, y, z]] + self._fields_shape + interp_inplane, + [len(np.atleast_1d(comp)) for comp in [x, y, z]] + self._non_spatial_shape, ) coords_dict = {"x": x, "y": y, "z": z} - coords_dict.update(self._values_coords_dict) + coords_dict.update(self._non_spatial_coords_dict) - if len(self._values_coords_dict) == 0: + if len(self._non_spatial_coords_dict) == 0: return SpatialDataArray(interp_broadcasted, coords=coords_dict, name=self.values.name) else: return XrDataArray(interp_broadcasted, coords=coords_dict, name=self.values.name) @@ -646,7 +647,7 @@ def plot( raise DataError( "Unstructured dataset contains more than 1 field. " "Use '.sel()' to select a single field from available dimensions " - f"{self._values_coords_dict} before plotting." + f"{self._non_spatial_coords_dict} before plotting." 
) plot_obj = ax.tripcolor( self._triangulation_obj, diff --git a/tidy3d/components/material/multi_physics.py b/tidy3d/components/material/multi_physics.py index 7e26a476b9..8cc946a139 100644 --- a/tidy3d/components/material/multi_physics.py +++ b/tidy3d/components/material/multi_physics.py @@ -158,6 +158,7 @@ def __getattr__(self, name: str): "eps_complex_to_nk": self.optical, "nonlinear_spec": self.optical, "is_pec": self.optical, + "is_pec_like": self.optical, "is_time_modulated": self.optical, "is_nonlinear": self.optical, "is_fully_anisotropic": self.optical, diff --git a/tidy3d/components/medium.py b/tidy3d/components/medium.py index 3ff4a48d92..0ed5cd2039 100644 --- a/tidy3d/components/medium.py +++ b/tidy3d/components/medium.py @@ -1363,6 +1363,11 @@ def is_pec(self): """Whether the medium is a PEC.""" return False + @cached_property + def is_pec_like(self): + """Whether the medium is treated as a PEC medium in surface monitors.""" + return self.is_pec + @cached_property def is_pmc(self): """Whether the medium is a PMC.""" @@ -6181,6 +6186,11 @@ def _positive_conductivity(cls, val): raise ValidationError("For lossy metal, 'conductivity' must be positive. ") return val + @cached_property + def is_pec_like(self): + """Whether the medium is treated as a PEC medium in surface monitors.""" + return True + @cached_property def _fitting_result(self) -> tuple[PoleResidue, float]: """Fitted scaled surface impedance and residue.""" @@ -8103,20 +8113,3 @@ def medium_from_nk(n: float, k: float, freq: float, **kwargs) -> Union[Medium, L if eps_complex.real >= 1: return Medium.from_nk(n, k, freq, **kwargs) return Lorentz.from_nk(n, k, freq, **kwargs) - - -def _is_pec_like(medium: MediumType3D) -> bool: - """Check whether the medium is PEC or lossy metal. - - Parameters - ---------- - medium : MediumType3D - Medium to check - - Returns - ------- - bool - True if PEC or lossy metal. 
- """ - - return medium.is_pec or isinstance(medium, LossyMetalMedium) diff --git a/tidy3d/components/monitor.py b/tidy3d/components/monitor.py index 8048fa5a88..cfc1f18a02 100644 --- a/tidy3d/components/monitor.py +++ b/tidy3d/components/monitor.py @@ -1599,8 +1599,8 @@ class AbstractSurfaceMonitor(Monitor, ABC): "first and last point of the monitor grid are always included.", ) - colocate: Literal[False] = pydantic.Field( - False, + colocate: Literal[True] = pydantic.Field( + True, title="Colocate Fields", description="For surface monitors fields are always colocated on surface.", ) diff --git a/tidy3d/components/simulation.py b/tidy3d/components/simulation.py index afb0727fa5..beda5dcd3c 100644 --- a/tidy3d/components/simulation.py +++ b/tidy3d/components/simulation.py @@ -69,7 +69,6 @@ MediumType, MediumType3D, PECMedium, - _is_pec_like, ) from .monitor import ( AbstractFieldProjectionMonitor, @@ -3946,12 +3945,12 @@ def _get_surface_monitor_bounds( sim_box = Box(center=center, size=size) mnt_bounds = Box.bounds_intersection(monitor.bounds, sim_box.bounds) - if _is_pec_like(medium): + if medium.is_pec_like: return [mnt_bounds] bounds = [] for structure in structures: - if _is_pec_like(structure.medium): + if structure.medium.is_pec_like: intersection_bounds = Box.bounds_intersection(mnt_bounds, structure.geometry.bounds) if all(bmin <= bmax for bmin, bmax in zip(*intersection_bounds)): bounds.append(intersection_bounds) @@ -3964,12 +3963,12 @@ def _get_surface_monitor_bounds_self(self, monitor: SurfaceMonitorType) -> list[ sim_box = Box(center=self.center, size=self.size) mnt_bounds = Box.bounds_intersection(monitor.bounds, sim_box.bounds) - if _is_pec_like(self.medium): + if self.medium.is_pec_like: return [mnt_bounds] bounds = [] for structure in self.structures: - if _is_pec_like(structure.medium): + if structure.medium.is_pec_like: intersection_bounds = Box.bounds_intersection(mnt_bounds, structure.geometry.bounds) if all(bmin <= bmax for bmin, bmax in zip(*intersection_bounds)): bounds.append(intersection_bounds) @@ -3994,6 +3993,20 @@ def error_empty_surface_monitor(cls, val, values): ) return val + @pydantic.validator("monitors", always=True) + @skip_if_fields_missing(["size"]) + def error_surface_monitors_with_zero_size(cls, val, values): + """Error if simulation has surface monitors and the size of domain is zero along any dimension.""" + size = values.get("size") + monitors = val + not_3d = any(dim == 0 for dim in size) + surface_monitors_present = any(isinstance(mnt, SurfaceMonitorType) for mnt in monitors) + if not_3d and surface_monitors_present: + raise SetupError( + "Simulation domain has size zero along at least one dimension; surface monitors are not allowed in this case." + ) + return val + @pydantic.validator("grid_spec", always=True) @skip_if_fields_missing(["medium", "sources", "structures"]) def _warn_grid_size_too_small(cls, val, values): diff --git a/tidy3d/components/viz/axes_utils.py b/tidy3d/components/viz/axes_utils.py index e2f84c2d4a..b77eac26d6 100644 --- a/tidy3d/components/viz/axes_utils.py +++ b/tidy3d/components/viz/axes_utils.py @@ -1,5 +1,6 @@ from __future__ import annotations +import inspect from functools import wraps from typing import Optional @@ -139,20 +140,45 @@ def _plot(*args, **kwargs) -> Ax: def _is_notebook() -> bool: - """Detect if running in Jupyter notebook. + """Detect if running in an interactive notebook environment. 
+
+    This function detects various notebook environments including:
+    - Jupyter Notebook
+    - JupyterLab
+    - Google Colab
+    - VSCode Interactive Window/Notebook
+    - Other ipykernel-based environments
 
     Returns
     -------
     bool
-        True if running in Jupyter notebook, False otherwise.
+        True if running in a notebook environment, False otherwise.
     """
     try:
+        # Check for Google Colab first
+        import sys
+
+        if "google.colab" in sys.modules:
+            return True
+
+        # Check for IPython
        from IPython import get_ipython
 
+        # Get the IPython instance
         ipython = get_ipython()
         if ipython is None:
             return False
-        return "IPKernelApp" in ipython.config
+
+        # Check multiple attributes that indicate a notebook environment
+        if "IPKernelApp" in ipython.config:
+            return True
+        # Jupyter, JupyterLab, VSCode notebooks, and Google Colab all run an
+        # ipykernel-based 'ZMQInteractiveShell', while a plain terminal IPython
+        # session uses 'TerminalInteractiveShell' and should not count as a notebook
+        if ipython.__class__.__name__ == "ZMQInteractiveShell":
+            return True
+
+        return False
     except (ImportError, AttributeError):
         return False
 
@@ -181,7 +207,14 @@ def add_plotter_if_none(plot):
     def _plot(*args, **kwargs):
         """New plot function using a generated plotter if None."""
         # Extract decorator-specific parameters
-        plotter = kwargs.get("plotter")
+
+        plotter = kwargs.get("plotter", None)
+
+        if plotter is None:
+            sig = inspect.signature(plot)
+            bound_arguments = sig.bind_partial(*args)
+            plotter = bound_arguments.arguments.get("plotter", None)
+
         show = kwargs.pop("show", True)
         windowed = kwargs.pop("windowed", None)
         window_size = kwargs.pop("window_size", (800, 600))