Commits (23)
b3b53d1: Initial commit (yuanshen-flexcompute, Sep 8, 2025)
8a2114b: Update GroundedCPWViaFence.ipynb (yuanshen-flexcompute, Sep 8, 2025)
a33a306: Update LinearLumpedElements.ipynb (yuanshen-flexcompute, Sep 8, 2025)
55e8b99: Update 2 notebooks (yuanshen-flexcompute, Sep 8, 2025)
1b37513: ruff all notebooks (yuanshen-flexcompute, Sep 8, 2025)
684447d: Updating smatrix notebook (momchil-flex, Sep 8, 2025)
cf23e2f: Ran 3 (CircularlyPolarized, CoupledLineBandpass, DiffStripline) (yuanshen-flexcompute, Sep 11, 2025)
e155550: Ran another 4 (AntennaChar, EdgeFeed, Grounded, Hybrid) (yuanshen-flexcompute, Sep 11, 2025)
9a15152: Ran more notebooks (yuanshen-flexcompute, Sep 11, 2025)
b8ccb55: autograd smatrix notebook (tylerflex, Jul 25, 2025)
69cf961: deprecate: remove adjoint plugin (daquinteroflex, Sep 12, 2025)
880e924: refactor(tidy3d): FXC-3960 Update notebooks to use new config API (marcorudolphflex, Nov 20, 2025)
b48dd19: feat(tidy3d): FXC-3718-docs-and-examples-for-container-aware-run (#408) (daquinteroflex, Nov 21, 2025)
a3b3ba3: fix: combine Luneburg lens media into GeometryGroup (#411) (momchil-flex, Nov 24, 2025)
7107e54: update RF/microwave notebooks with new path integral names and mode i… (dmarek-flex, Sep 29, 2025)
0fb6860: fix: removing warnings in Autograd notebook 17 (#414) (momchil-flex, Nov 28, 2025)
b4ae879: ruff format (dmarek-flex, Dec 5, 2025)
cb83443: update notebooks to use rf namespace (dmarek-flex, Dec 5, 2025)
ed47632: ruff fix and remove accidental import (dmarek-flex, Dec 8, 2025)
91373b0: FXC-3960 Update Primer to use new config API (marcorudolphflex, Dec 10, 2025)
31bf0f7: inverse design seminar notebooks (yaugenst-flex, Oct 21, 2025)
dbeece0: Merge branch 'develop' into pre/2.10 (yaugenst-flex, Dec 10, 2025)
6d0a9b3: docs: remove experimental autograd notes and update adjoint plugin re… (yaugenst-flex, Dec 10, 2025)
Files changed
2025-10-09-invdes-seminar/00_setup_guide.ipynb: +344/-0 (large diff not rendered)
2025-10-09-invdes-seminar/01_bayes.ipynb: +556/-0 (large diff not rendered)
2025-10-09-invdes-seminar/02_adjoint.ipynb: +523/-0 (large diff not rendered)
2025-10-09-invdes-seminar/03_sensitivity.ipynb: +1,032/-0 (large diff not rendered)
2025-10-09-invdes-seminar/04_adjoint_robust.ipynb: +441/-0 (large diff not rendered)
2025-10-09-invdes-seminar/05_robust_comparison.ipynb: +427/-0 (large diff not rendered)
2025-10-09-invdes-seminar/06_measurement_calibration.ipynb: +474/-0 (large diff not rendered)
2025-10-09-invdes-seminar/optim.py: +132/-0
@@ -0,0 +1,132 @@
"""Utility routines for functional-style optimization in the tutorial notebooks.

The helpers here avoid mutating inputs so they play nicely with autograd.
"""

import autograd.numpy as np
from autograd.misc import flatten


def clip_params(params, bounds):
"""Clip a parameter dictionary according to per-key bounds.

Parameters
----------
params : dict[str, np.ndarray]
Dictionary mapping parameter names to array values.
bounds : dict[str, tuple[float | None, float | None]]
Lower and upper limits for each parameter. Missing keys default to no
clipping. ``None`` disables a bound on that side.

Returns
-------
dict[str, np.ndarray]
New dictionary with values clipped to the requested interval.
"""
clipped = {}
for key, value in params.items():
lo, hi = bounds.get(key, (None, None))
lo_val = -np.inf if lo is None else lo
hi_val = np.inf if hi is None else hi
clipped[key] = np.clip(value, lo_val, hi_val)
return clipped


def _flatten(tree):
"""Return a flat representation of a pytree and its inverse transform."""
flat, unflatten = flatten(tree)
return np.array(flat, dtype=float), unflatten


def init_adam(params, lr=1e-2, beta1=0.9, beta2=0.999, eps=1e-8):
"""Initialize Adam optimizer state for a parameter pytree.

Parameters
----------
params : dict[str, np.ndarray]
Current parameter values used to size the optimizer state.
lr : float = 1e-2
Learning rate applied to each step.
beta1 : float = 0.9
Exponential decay applied to the first moment estimate.
beta2 : float = 0.999
Exponential decay applied to the second moment estimate.
eps : float = 1e-8
Numerical stabilizer added inside the square-root denominator.

Returns
-------
dict[str, object]
Dictionary holding the Adam accumulator vectors and hyperparameters.
"""
flat_params, unflatten = _flatten(params)
state = {
"t": 0,
"m": np.zeros_like(flat_params),
"v": np.zeros_like(flat_params),
"unflatten": unflatten,
"lr": lr,
"beta1": beta1,
"beta2": beta2,
"eps": eps,
}
return state


def adam_update(grads, state):
"""Compute Adam parameter updates from gradients and state.

Parameters
----------
grads : dict[str, np.ndarray]
Gradient pytree with the same structure as the parameters.
state : dict[str, object]
Optimizer state returned by :func:`init_adam`.

Returns
-------
updates : dict[str, np.ndarray]
Parameter deltas that should be subtracted from the current values.
new_state : dict[str, object]
Updated optimizer state after incorporating the gradients.
"""
g_flat, _ = _flatten(grads)
t = state["t"] + 1

beta1 = state["beta1"]
beta2 = state["beta2"]
m = (1 - beta1) * g_flat + beta1 * state["m"]
v = (1 - beta2) * (g_flat * g_flat) + beta2 * state["v"]

m_hat = m / (1 - beta1**t)
v_hat = v / (1 - beta2**t)
updates_flat = state["lr"] * (m_hat / (np.sqrt(v_hat) + state["eps"]))

new_state = {
**state,
"t": t,
"m": m,
"v": v,
}
updates = state["unflatten"](updates_flat)
return updates, new_state


def apply_updates(params, updates):
"""Apply additive updates to a parameter pytree.

Parameters
----------
params : dict[str, np.ndarray]
Original parameter dictionary.
updates : dict[str, np.ndarray]
Update dictionary produced by :func:`adam_update`.

Returns
-------
dict[str, np.ndarray]
New dictionary with ``updates`` subtracted element-wise.
"""
p_flat, unflatten = _flatten(params)
u_flat, _ = _flatten(updates)
return unflatten(p_flat - u_flat)
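
For reviewers trying out `optim.py`, here is a minimal usage sketch of the intended loop: take the autograd gradient of a loss, feed it through `adam_update`, subtract the updates with `apply_updates`, and clip with `clip_params`. The toy quadratic loss, parameter names, and bounds below are hypothetical placeholders, not values from the seminar notebooks:

```python
import autograd.numpy as np
from autograd import grad

from optim import adam_update, apply_updates, clip_params, init_adam

# Hypothetical toy loss: drive both parameter arrays toward 0.5.
def loss(params):
    return np.sum((params["widths"] - 0.5) ** 2) + np.sum((params["gaps"] - 0.5) ** 2)

params = {"widths": np.full(4, 0.3), "gaps": np.full(4, 0.9)}
bounds = {"widths": (0.2, 0.6), "gaps": (0.3, None)}  # None leaves that side unbounded

state = init_adam(params, lr=1e-1)
for _ in range(100):
    grads = grad(loss)(params)                  # gradient pytree, same structure as params
    updates, state = adam_update(grads, state)  # deltas to subtract, plus new optimizer state
    params = clip_params(apply_updates(params, updates), bounds)

print(params)  # arrays approach 0.5, clipped to the requested bounds
```

Because every helper returns a new dictionary instead of mutating its inputs, the loop stays compatible with autograd tracing, as the module docstring notes.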
2025-10-09-invdes-seminar/results/gc_adjoint_best.json: +73/-0
@@ -0,0 +1,73 @@
{
"widths_si": [
0.48262618509355615,
0.5207667397076212,
0.45559630741287455,
0.46361599591364383,
0.44415937806339206,
0.4725359297284237,
0.4897650205171269,
0.43401055421044743,
0.5470685569556949,
0.36090417995022805,
0.35112952011499815,
0.25182851621035,
0.2287991538613288,
0.21772272282716135,
0.5871457399636976
],
"gaps_si": [
0.6608377498745214,
0.7153002966535659,
0.6755416250853287,
0.762711911245917,
0.6957603543580327,
0.6485980725930465,
0.7270242877193821,
0.6569877864900205,
0.7434394276954258,
0.8910689853995577,
0.92010444487145,
0.887662287039533,
0.8439724990649012,
0.7880932609023489,
0.7992416233438039
],
"widths_sin": [
0.7891411537966333,
0.6441362131696193,
0.5221408734233975,
0.31370712049190075,
0.6036396259080945,
0.5709134822507435,
0.6102929883304251,
0.5666814968867978,
0.5911545201167835,
0.5511240455381605,
0.6759490391650566,
0.424347404772533,
0.4917036091769178,
0.5990651442832533,
0.7041841301345496
],
"gaps_sin": [
0.4541255482246594,
0.4802605152344745,
0.3,
0.4914339475589058,
0.5480930702315364,
0.6026168524939672,
0.6561924161853298,
0.5934806415337143,
0.478494886109227,
0.44772190354423175,
0.7331937769153588,
0.6299485623886972,
0.48849470041329063,
0.35636407607194925,
0.5135103145142313
],
"first_gap_si": -0.6720330444742626,
"first_gap_sin": 0.5035568088634116,
"target_power": 0.5676497430872463
}
2025-10-09-invdes-seminar/results/gc_adjoint_robust_best.json: +73/-0
@@ -0,0 +1,73 @@
{
"widths_si": [
0.4920388608057116,
0.5269663382801457,
0.47051077631906724,
0.45640313960745277,
0.4463409120598345,
0.479710427467526,
0.4926663437517315,
0.44457287894940967,
0.545014746020923,
0.36598776983110565,
0.3329813653191155,
0.2468853437833761,
0.23766189386793363,
0.23602095263192163,
0.5993116787228173
],
"gaps_si": [
0.6594384762369605,
0.7075580062177993,
0.6811015769993951,
0.7457468608769762,
0.6946555894542846,
0.6444157147446795,
0.7121732105630795,
0.6468550006640721,
0.7334476826411184,
0.8848483116592654,
0.926354310270442,
0.9035257883720766,
0.8629975364993915,
0.8031133570655837,
0.7992416233438039
],
"widths_sin": [
0.8194008890879501,
0.654976203326419,
0.5154062669153403,
0.28937104202011993,
0.5797843891332557,
0.5547076037062613,
0.6077917250899648,
0.5628056343171124,
0.5933617318869786,
0.548751792878745,
0.6887370577523546,
0.41013832947358364,
0.4913432960091517,
0.6137520507037625,
0.7155681547704924
],
"gaps_sin": [
0.45753036557675575,
0.5098708096717812,
0.3,
0.49309132198664574,
0.5499656317615206,
0.6082880193683878,
0.6647065648783161,
0.6049556315727266,
0.48280853554230346,
0.45039397074013265,
0.7420847046346506,
0.6328210425726839,
0.5020955912266114,
0.3587164271526588,
0.5135103145142313
],
"first_gap_si": -0.6811767256145679,
"first_gap_sin": 0.4802966569812285,
"etch_bias_modeled": 0.02
}
2025-10-09-invdes-seminar/results/gc_bayes_opt_best.json: +9/-0
@@ -0,0 +1,9 @@
{
"width_si": 0.3983180007432449,
"gap_si": 0.7992416233438039,
"width_sin": 0.5781958117277934,
"gap_sin": 0.5135103145142313,
"first_gap_si": -0.6933388041768698,
"target_power": 0.3425821844561507,
"coupling_loss_db": 4.652352257066546
}
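
One consistency check on this result file: the stored `coupling_loss_db` appears to be the standard dB conversion of `target_power`, i.e. `-10 * log10(target_power)`. A quick verification sketch (the interpretation is an assumption; the numbers are from the file above):

```python
import math

target_power = 0.3425821844561507
print(-10 * math.log10(target_power))  # 4.652352257066546, matching coupling_loss_db
```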