import os
import json
import time
import subprocess
from pathlib import Path
import shlex
from datetime import datetime

import h5py
import numpy as np
from openpyxl import Workbook, load_workbook


# ==================================================================================
# JSON-file creation for automatic ptychography
#
# When building the JSON file for a ptychographic reconstruction we must start
# from a template that matches the conditions the 4D-STEM data was taken with.
# ==================================================================================

# Human-readable acquisition condition -> PtyREX template path.
TEMPLATE_MAP = {
    "80 KeV": "/dls_sw/e02/PtyREX_templates/80KeV_template.json",
    "200 KeV": "/dls_sw/e02/PtyREX_templates/200KeV_template.json",
    "300 KeV": "/dls_sw/e02/PtyREX_templates/300KeV_template.json",
    "300 KeV (Multi-slice)": "/dls_sw/e02/PtyREX_templates/300KeV_template_ms.json",
}


def _in_notebook() -> bool:
    """Return True when this code is executing inside a Jupyter kernel."""
    try:
        from IPython import get_ipython
        shell = get_ipython()
        # A plain IPython terminal has a shell but no IPKernelApp entry.
        return shell is not None and "IPKernelApp" in shell.config
    except Exception:
        # IPython missing entirely -> definitely not a notebook.
        return False


def select_template_cli(templates: dict[str, str]) -> str:
    """Terminal prompt for choosing a PtyREX template.

    The user may enter a menu number or paste an absolute path (anything
    starting with '/'). Returns the selected template JSON path.
    """
    names = list(templates)
    print("\nSelect PtyREX template:")
    for number, name in enumerate(names, 1):
        print(f" {number}) {name} -> {templates[name]}")
    while True:
        answer = input(f"Enter choice [1-{len(names)}] (or paste full path): ").strip()
        if not answer:
            continue
        if answer.startswith("/"):
            return answer
        if answer.isdigit() and 1 <= int(answer) <= len(names):
            return templates[names[int(answer) - 1]]
        print("Invalid selection, try again.")


def ask_bool_cli(prompt: str, default: bool) -> bool:
    """Yes/no terminal prompt; an empty reply returns *default*."""
    hint = "Y/n" if default else "y/N"
    while True:
        reply = input(f"{prompt} [{hint}]: ").strip().lower()
        if reply == "":
            return default
        if reply in ("y", "yes"):
            return True
        if reply in ("n", "no"):
            return False
        print("Please enter y or n.")


# ~~~~~~~~~~~~~~~~~~~~ Auto customisation of JSON template ~~~~~~~~~~~~~~~~~~~~ #
# The functions below impose your experimental conditions onto the JSON
# template. This helps initialise the probe during the ptychographic
# reconstruction by establishing key parameters (see table below).
# +----------------------+-----------------------------------------------+-------------------------------+
# | Parameter            | JSON path                                     | Source / Notes                |
# +----------------------+-----------------------------------------------+-------------------------------+
# | Scan rotation        | process.common.scan.rotation                  | Voltage-dependent offset      |
# | Camera length        | experiment.detector.position[2]               | factor * nominal CL           |
# | Convergence angle    | experiment.optics.lens.alpha                  | 2 x semi-conv (aperture map)  |
# | Scan shape (N)       | process.common.scan.N                         | metadata or inferred          |
# | Scan step size       | process.common.scan.dR                        | metadata step_size(m)         |
# | Beam energy          | process.common.source.energy                  | acceleration voltage          |
# | Defocus              | experiment.optics.lens.defocus                | metadata defocus (nm -> m)    |
# | Data path            | experiment.data.data_path                     | input HDF5 file               |
# | Output directories   | base_dir / process.save_dir                   | out_dir                       |
# +----------------------+-----------------------------------------------+-------------------------------+


def meta2config(acc_v, nominal_cl_m, aperture_id, nominal_rot, factor=1.7):
    """Translate microscope metadata into PtyREX configuration values.

    Args:
        acc_v: acceleration voltage in volts (80e3 / 200e3 / 300e3 recognised).
        nominal_cl_m: nominal camera length in metres.
        aperture_id: aperture index used to look up the convergence angle.
        nominal_rot: nominal scan rotation in degrees.
        factor: calibration multiplier applied to the nominal camera length.

    Returns:
        (rot_angle_deg, camera_length_m, conv_angle_rad). For an unrecognised
        voltage or aperture the rotation and/or convergence fall back to 0.0.
    """
    # (voltage, rotation offset, aperture -> convergence semi-angle map)
    presets = (
        (80e3, 238.5, {1: 41.65e-3, 2: 31.74e-3, 3: 24.80e-3, 4: 15.44e-3}),
        (200e3, -77.585, {1: 37.7e-3, 2: 28.8e-3, 3: 22.4e-3, 4: 14.0e-3, 5: 6.4e-3}),
        (300e3, -85.5, {1: 44.7e-3, 2: 34.1e-3, 3: 26.7e-3, 4: 16.7e-3}),
    )
    rot_angle = 0.0
    conv_angle = 0.0
    for volts, rot_offset, conv_map in presets:
        if np.isclose(acc_v, volts):
            rot_angle = rot_offset - nominal_rot
            conv_angle = conv_map.get(int(aperture_id), 0.0)
            break
    camera_length = float(factor) * float(nominal_cl_m)
    return float(rot_angle), float(camera_length), float(conv_angle)


def safe_scan_shape(h5):
    """Best-effort scan shape [ny, nx] for an open metadata HDF handle.

    Prefers the recorded metadata/4D_shape entry; otherwise infers the shape
    from the total frame count using a table of common (possibly flyback-
    cropped) scan sizes. Returns None when neither route works.
    """
    try:
        ny, nx = h5["metadata"]["4D_shape"][:2]
        return [int(ny), int(nx)]
    except Exception:
        pass  # no usable metadata entry; fall back to frame counting

    try:
        n_frames = int(h5["data"]["frames"].shape[0])
    except Exception:
        return None

    # frame count -> scan grid; the off-by-one variants cover flyback cropping
    known_counts = {
        262144: (512, 512),
        261632: (512, 511),
        261121: (511, 511),
        65536: (256, 256),
        65280: (256, 255),
        65025: (255, 255),
        16384: (128, 128),
        16256: (128, 127),
        16129: (127, 127),
    }
    shape = known_counts.get(n_frames)
    return list(shape) if shape is not None else None


def generate_ptyrex_json(
    meta_hdf_path,
    template_json_path,
    out_dir,
    config_name="ptycho",
    overwrite=False,
    factor=1.7,
    verbose=True,
):
    """Fill a PtyREX template with experiment metadata and write it to disk.

    Reads acquisition metadata from *meta_hdf_path*, patches the template
    loaded from *template_json_path*, and writes <out_dir>/<config_name>.json.
    An existing file is left untouched unless *overwrite* is True.

    Returns the path of the written (or pre-existing) JSON file.
    """
    os.makedirs(out_dir, exist_ok=True)
    out_json = os.path.join(out_dir, f"{config_name}.json")
    if os.path.exists(out_json) and not overwrite:
        if verbose:
            print(f"skip (exists): {out_json}")
        return out_json

    with open(template_json_path, "r") as fh:
        cfg = json.load(fh)

    # Pull everything we need out of the metadata file, then close it.
    with h5py.File(meta_hdf_path, "r") as h5:
        md = h5["metadata"]
        acc = float(md["ht_value(V)"][()])
        nominal_cl = float(md["nominal_camera_length(m)"][()])
        aperture = int(md["aperture_size"][()])
        nominal_rot = float(md["nominal_scan_rotation"][()])
        rot_angle, cam_len, conv = meta2config(acc, nominal_cl, aperture, nominal_rot, factor=factor)
        scan_shape = safe_scan_shape(h5)
        step = float(md["step_size(m)"][()])
        defocus_nm = float(md["defocus(nm)"][()])

    cfg["base_dir"] = out_dir
    cfg["process"]["save_dir"] = out_dir
    cfg["experiment"]["data"]["data_path"] = meta_hdf_path
    cfg["process"]["common"]["scan"]["rotation"] = rot_angle
    cfg["experiment"]["detector"]["position"] = [0, 0, cam_len]
    cfg["experiment"]["optics"]["lens"]["alpha"] = float(conv) * 2.0  # 2* Semi_conv angle
    if scan_shape is not None:
        cfg["process"]["common"]["scan"]["N"] = scan_shape
    cfg["process"]["common"]["source"]["energy"] = [acc]
    cfg["process"]["common"]["scan"]["dR"] = [step, step]
    cfg["experiment"]["optics"]["lens"]["defocus"] = [defocus_nm * 1e-9, defocus_nm * 1e-9]
    cfg["process"]["save_prefix"] = config_name

    with open(out_json, "w") as fh:
        json.dump(cfg, fh, indent=4)
    if verbose:
        print(f"wrote: {out_json}")
    return out_json

# ================================== End of JSON template section ================================== #


# ============================================================================== #
# Establishing Slurm submission via SSH (Wilson)
# ============================================================================== #
#                 ~~ W I L S O N ~~
# SLURM submission helpers: jobs are submitted by running `sbatch` over an
# interactive ssh session to the cluster login node.


def _run_ssh_sbatch(command: str, host: str, user=None, timeout: float = 60):
    """Run one shell command over ssh, then log out, and parse sbatch output.

    Uses communicate() so stdout and stderr are both drained — reading only
    stdout while stderr is a PIPE can deadlock if ssh writes enough stderr.

    Returns:
        (success, job_id, output): success is True when an
        "Submitted batch job <id>" line was seen; output is the raw
        stripped stdout for diagnostics.
    """
    ssh_target = f"{user}@{host}" if user else host
    try:
        ssh_process = subprocess.Popen(
            ["ssh", "-tt", ssh_target],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            bufsize=0,
        )
        try:
            stdout_text, _stderr_text = ssh_process.communicate(
                f"{command}\nlogout\n", timeout=timeout
            )
        except subprocess.TimeoutExpired:
            ssh_process.kill()
            ssh_process.communicate()  # reap the killed process
            return False, None, "SSH submission timed out"

        output = stdout_text.strip()
        for line in stdout_text.splitlines():
            if "Submitted batch job" in line:
                return True, line.strip().split()[-1], output
        return False, None, output

    except Exception as e:
        return False, None, str(e)


def submit_job_via_ssh(script_path, host="wilson", user=None):
    """Submit `sbatch <script_path>` on *host* via ssh.

    Returns (success, job_id, output). The path is shell-quoted so paths
    with spaces/metacharacters cannot break or inject into the command
    (matching submit_sbatch_json_via_ssh).
    """
    return _run_ssh_sbatch(f"sbatch {shlex.quote(str(script_path))}", host=host, user=user)


def submit_sbatch_json_via_ssh(bash_script_path: str, json_path: str, host="wilson", user=None):
    """Submit `sbatch <bash_script> <json>` on *host* via ssh.

    Returns (success, job_id, output); both arguments are shell-quoted.
    """
    command = f"sbatch {shlex.quote(bash_script_path)} {shlex.quote(json_path)}"
    return _run_ssh_sbatch(command, host=host, user=user)


def is_slurm_job_active(job_id: str, host="wilson", user=None) -> bool:
    """Return True while *job_id* is still queued or running on SLURM.

    Fail-safe: on any ssh/squeue error the job is reported as active so
    callers do not resubmit while the real state is unknown. An empty or
    None job_id is never active.
    """
    if not job_id:
        return False

    ssh_target = f"{user}@{host}" if user else host
    try:
        cmd = ["ssh", ssh_target, f"squeue -h -j {shlex.quote(str(job_id))} -o %T"]
        r = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
        if r.returncode != 0:
            return True  # squeue failed: assume active rather than resubmit
        # squeue prints a state line only while the job exists in the queue.
        return bool((r.stdout or "").strip())
    except Exception:
        return True  # conservative: unknown state -> treat as active

# ================================== End of Slurm submission section ================================== #


# ============================================================================== #
# General functions to help avoid job submission errors
# ============================================================================== #
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
    MIB "stability" helper: monitors any new MIB file for changes.
    MIB files will not be submitted until they are stationary, i.e. not changing in size.
    This should avoid sending a MIB file off while it is still being written to the watched folder.
'''
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def is_file_stable(path: Path, min_age_sec: float = 10.0, stable_window_sec: float = 5.0) -> bool:
    """Return True when *path* exists, is non-empty, is at least
    *min_age_sec* old, and its size did not change over a
    *stable_window_sec* observation window (blocks for that long)."""
    try:
        st1 = path.stat()
    except FileNotFoundError:
        return False

    now = time.time()
    # Too recently modified -> probably still being written.
    if (now - st1.st_mtime) < min_age_sec:
        return False

    time.sleep(stable_window_sec)

    try:
        st2 = path.stat()
    except FileNotFoundError:
        return False

    return (st1.st_size == st2.st_size) and (st2.st_size > 0)


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
    Watcher helper for locating the meta file near mask.png.
    A minor crime but a crime nonetheless.
    Similar to the MIB stability helper, this avoids premature submission of the
    ptychography job by making sure the MIB has fully converted: a mask.png file
    is always produced last during the MIB conversion, so its presence triggers
    the ptycho job submission.
'''
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def find_meta_file_near(mask_folder: Path, max_up: int = 3):
    """Return the first HDF-like file found near *mask_folder*, or None.

    Search order: the folder itself, then its immediate subfolders, then up
    to *max_up* parent folders. Within each location, .hdf is preferred over
    .h5 over .hdf5, and names sort alphabetically.
    """
    patterns = ["*.hdf", "*.h5", "*.hdf5"]

    for pat in patterns:
        files = sorted(mask_folder.glob(pat))
        if files:
            return files[0]

    # One level down: immediate subdirectories only.
    for sub in mask_folder.iterdir():
        if not sub.is_dir():
            continue
        for pat in patterns:
            files = sorted(sub.glob(pat))
            if files:
                return files[0]

    # Walk upwards through the parents.
    cur = mask_folder
    for _ in range(max_up):
        cur = cur.parent
        for pat in patterns:
            files = sorted(cur.glob(pat))
            if files:
                return files[0]

    return None


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
    PTYREX SUBMISSION LOCK + SENT-FLAG HELPERS
    The watcher searches for all ptycho JSON files and submits them. To avoid
    resubmitting those reconstructions we create a "pty_sent.txt" file when the
    JSON is submitted; it then acts as a flag preventing repeat submission.
'''
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def try_acquire_pty_lock(out_dir: Path) -> bool:
    """Atomically create pty_submitting.lock in *out_dir*.

    Returns True on success; False when another process already holds the
    lock. O_CREAT|O_EXCL makes creation atomic on the filesystem.
    """
    lock_path = out_dir / "pty_submitting.lock"
    try:
        fd = os.open(str(lock_path), os.O_CREAT | os.O_EXCL | os.O_WRONLY)
        with os.fdopen(fd, "w") as f:
            f.write(f"pid={os.getpid()}\n")
            f.write(f"timestamp={datetime.now().isoformat(timespec='seconds')}\n")
        return True
    except FileExistsError:
        return False


def release_pty_lock(out_dir: Path) -> None:
    """Remove the pty_submitting.lock in *out_dir* (no-op if already gone)."""
    lock_path = out_dir / "pty_submitting.lock"
    try:
        lock_path.unlink()
    except FileNotFoundError:
        pass


def write_pty_sent_flag(out_dir: Path, job_id: str | None = None):
    """Write pty_sent.txt in *out_dir* recording the (optional) SLURM job id
    and a timestamp; returns the flag's path. Presence of this file stops
    the watcher from resubmitting the reconstruction."""
    flag_path = out_dir / "pty_sent.txt"
    ts = datetime.now().isoformat(timespec="seconds")

    content = "pty_sent\n"
    if job_id:
        content += f"job_id={job_id}\n"
    content += f"timestamp={ts}\n"

    flag_path.write_text(content)
    return flag_path


def should_skip_pty_submission(out_dir: Path) -> tuple[bool, str]:
    """Return (skip, reason): skip is True when any of the sent/guard/
    no-resubmit marker files already exists in *out_dir*."""
    sent_flag = out_dir / "pty_sent.txt"
    no_resubmit_flag = out_dir / "pty_noresubmit.txt"
    guard_flag = out_dir / "pty_submitted.guard"

    if sent_flag.exists():
        return True, f"already submitted (found {sent_flag.name})"
    if guard_flag.exists():
        return True, f"already submitted (found {guard_flag.name})"
    if no_resubmit_flag.exists():
        return True, f"no-resubmit marker present (found {no_resubmit_flag.name})"
    return False, ""


def canonical(p: Path) -> Path:
    """Best-effort resolve() of *p*; returns *p* unchanged if resolution fails."""
    try:
        return p.resolve()
    except Exception:
        return p


# ================================== End of General Functions / helpers section ================================== #


# ============================================================================== #
# Bash script and txt file writer (used for SLURM submission)
# ============================================================================== #


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
    CONVERT INFO TXT WRITER
    This txt file is important as it defines the method by which the MIB
    conversion should occur. The chosen conditions are written to a
    "convert_info.txt" file which a subsequent bash script points to.
'''
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

def _reshaping_flags(reshaping: str):
    """Map a reshaping mode name to the four boolean flags
    (auto_reshape, no_reshaping, use_fly_back, known_shape); any
    unrecognised name falls back to no_reshaping."""
    if reshaping == "Auto_reshape":
        return True, False, False, False
    elif reshaping == "Flyback":
        return False, False, True, False
    elif reshaping == "Known_shape":
        return False, False, False, True
    else:
        return False, True, False, False


def write_convert_info_from_params(
    to_convert_paths,
    info_path: Path,
    reshaping="Auto_reshape",
    Scan_X=0,
    Scan_Y=0,
    bin_sig_widget=2,
    bin_nav_widget=2,
    create_virtual_image=True,
    mask_path="",
    disk_lower_thresh=0.01,
    disk_upper_thresh=0.15,
    DPC_check=True,
    parallax_check=True,
    iBF=True,
    software_basedir="/dls_sw/e02/software/epsic_tools/epsic_tools/mib2hdfConvert/MIB_convert_widget/scripts/",
):
    """Write the convert_info.txt consumed by the MIB-conversion script.

    *to_convert_paths* are the MIB files to convert; the remaining keyword
    arguments mirror the conversion widget's options. When *mask_path* is
    empty, a default 12-bit mask shipped with the software is used.
    """
    auto_reshape, no_reshaping, use_fly_back, known_shape = _reshaping_flags(reshaping)

    # A binning factor of 1 means "no binning" -> flag off.
    if bin_sig_widget != 1:
        bin_sig_flag, bin_sig_factor = 1, bin_sig_widget
    else:
        bin_sig_flag, bin_sig_factor = 0, bin_sig_widget

    if bin_nav_widget != 1:
        bin_nav_flag, bin_nav_factor = 1, bin_nav_widget
    else:
        bin_nav_flag, bin_nav_factor = 0, bin_nav_widget

    if not mask_path:
        mask_path = os.path.join(software_basedir, "29042024_12bitmask.h5")

    content = (
        f"to_convert_paths = {[str(p) for p in to_convert_paths]}\n"
        f"auto_reshape = {auto_reshape}\n"
        f"no_reshaping = {no_reshaping}\n"
        f"use_fly_back = {use_fly_back}\n"
        f"known_shape = {known_shape}\n"
        f"Scan_X = {Scan_X}\n"
        f"Scan_Y = {Scan_Y}\n"
        f"iBF = {iBF}\n"
        f"bin_sig_flag = {bin_sig_flag}\n"
        f"bin_sig_factor = {bin_sig_factor}\n"
        f"bin_nav_flag = {bin_nav_flag}\n"
        f"bin_nav_factor = {bin_nav_factor}\n"
        f"reshaping = {reshaping}\n"
        f"create_virtual_image = {create_virtual_image}\n"
        f"mask_path = {mask_path}\n"
        f"disk_lower_thresh = {disk_lower_thresh}\n"
        f"disk_upper_thresh = {disk_upper_thresh}\n"
        f"DPC = {DPC_check}\n"
        f"parallax = {parallax_check}\n"
    )
    info_path.write_text(content)


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# SLURM script writer for MIB conversion
# Creates the bash script which is used in combination with the
# convert_info.txt for MIB conversion.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def create_cluster_submit(convert_info_path: Path, array_max_index: int, submit_path: Path,
                          scripts_dir: Path,
                          MIB_SCRIPT_PATH: str) -> None:
    """Write an executable sbatch array script to *submit_path*.

    The array runs tasks 0..array_max_index one at a time (%1); each task
    invokes MIB_SCRIPT_PATH with *convert_info_path* and its array index.
    Logs go to *scripts_dir* as %j_error.err / %j_output.out.
    """
    content = f"""#!/usr/bin/env bash
#SBATCH --partition cs04r
#SBATCH --job-name mib_convert
#SBATCH --nodes 1
#SBATCH --tasks-per-node 1
#SBATCH --cpus-per-task 1
#SBATCH --time 05:00:00
#SBATCH --mem 192G
#SBATCH --array=0-{array_max_index}%1
#SBATCH --error={scripts_dir}/%j_error.err
#SBATCH --output={scripts_dir}/%j_output.out

set -x
cd {scripts_dir}
module load python/epsic3.10

export OMP_NUM_THREADS=${{SLURM_CPUS_PER_TASK}}
export BLOSC_NTHREADS=$((SLURM_CPUS_PER_TASK * 2))
sleep 10

python {MIB_SCRIPT_PATH} {convert_info_path} $SLURM_ARRAY_TASK_ID
"""
    submit_path.write_text(content)
    submit_path.chmod(0o755)  # make the script executable for sbatch

# ================================== End of Bash script and txt file writer section ================================== #


# ============================================================================== #
# JSON file watcher and PtyREX submission
# ============================================================================== #


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
    WATCHER
    Creates the JSON then submits the PtyREX job on wilson.
'''
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def ensure_pty_out_when_mask_exists(
    processed_root: Path,
    done: set,
    template_json_path: str,
    ptyrex_bash_script_path: str,
    ssh_host: str = "wilson",
    ssh_user=None,
    factor: float = 1.7,
    overwrite_json: bool = False,
    config_name: str = "ptycho",
    verbose: bool = True,
    max_submissions_per_pass: int = 1,
):
    """Scan *processed_root* for finished conversions and submit PtyREX jobs.

    For each mask.png (written last by the MIB conversion, so it signals a
    complete dataset): locate the nearby meta HDF, create a
    pty_out/initial_recon output folder, generate the PtyREX JSON, and submit
    it over ssh. *done* is mutated in place to remember handled output dirs;
    flag files and an exclusive lock prevent duplicate submissions across
    processes. At most *max_submissions_per_pass* jobs are submitted per call.

    Returns the list of SLURM job ids submitted during this pass.
    """
    if not processed_root.exists():
        return []

    submitted_job_ids: list[str] = []

    for mask_png in processed_root.rglob("mask.png"):
        mask_folder = mask_png.parent

        meta_path = find_meta_file_near(mask_folder, max_up=3)
        if meta_path is None:
            if verbose:
                print(f"[watch] mask present but no meta file near {mask_folder}")
            continue

        out_dir = canonical(meta_path.parent / "pty_out" / "initial_recon")
        out_dir.mkdir(parents=True, exist_ok=True)

        # Already handled in this process.
        if out_dir in done:
            continue

        # Already handled by any process (flag files on disk).
        skip, reason = should_skip_pty_submission(out_dir)
        if skip:
            if verbose:
                print(f"[watch] skip: {reason} in {out_dir}")
            done.add(out_dir)
            continue

        # Exclusive lock so concurrent watchers cannot double-submit.
        if not try_acquire_pty_lock(out_dir):
            if verbose:
                print(f"[watch] skip: submission already in progress (lock exists in {out_dir})")
            continue

        try:
            out_json = generate_ptyrex_json(
                meta_hdf_path=str(meta_path),
                template_json_path=template_json_path,
                out_dir=str(out_dir),
                config_name=config_name,
                overwrite=overwrite_json,
                factor=factor,
                verbose=verbose,
            )

            success, job_id, msg = submit_sbatch_json_via_ssh(
                bash_script_path=ptyrex_bash_script_path,
                json_path=str(out_json),
                host=ssh_host,
                user=ssh_user,
            )

            if success:
                if verbose:
                    print(f"[watch] PtyREX submitted for {out_json} (job {job_id})")

                # Persist the sent flag BEFORE releasing the lock so other
                # watchers see it immediately.
                write_pty_sent_flag(out_dir=out_dir, job_id=job_id)
                done.add(out_dir)

                if job_id:
                    submitted_job_ids.append(job_id)

                # Rate-limit submissions to avoid flooding the cluster.
                if len(submitted_job_ids) >= max_submissions_per_pass:
                    if verbose:
                        print(f"[watch] reached max_submissions_per_pass={max_submissions_per_pass}; stopping this poll.")
                    return submitted_job_ids
            else:
                print(f"[watch] PtyREX submission failed for {out_json}: {msg}")

        except Exception as e:
            print(f"[watch] JSON generation or submission failed for {meta_path}: {e}")

        finally:
            release_pty_lock(out_dir)

    return submitted_job_ids

# ================================== End of Json file Watcher and PtyREX submission section ================================== #


# ============================================================================== #
# Excel logger
# ============================================================================== #


# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
    EXCEL LOGGING HELPER
I wanted to make a way of tracking the progress of a ptycho reconstruction\n", + " If a large number of PtyREX recons are running, it may be hard to track what data is ready to view\n", + " This excel sheet is designed to record data from any pty_out folder that a hdf file appears in (i.e your reconstruction)\n", + " I added a 600second delay in the logger, as sometimes it takes a few iteration before any observable data is indicated in the hdf\n", + " I need to update this section as a lot of information recording is verbose \n", + "'''\n", + "# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n", + "def _get_nested(d, keys, default=None):\n", + " cur = d\n", + " for k in keys:\n", + " if not isinstance(cur, dict) or k not in cur:\n", + " return default\n", + " cur = cur[k]\n", + " return cur\n", + "\n", + "\n", + "def find_json_near(path: Path, prefer_name: str = \"ptycho.json\"):\n", + " p1 = path.parent / prefer_name\n", + " if p1.exists():\n", + " return p1\n", + "\n", + " js = sorted(path.parent.glob(\"*.json\"))\n", + " if js:\n", + " return js[0]\n", + "\n", + " p2 = path.parent.parent / prefer_name\n", + " if p2.exists():\n", + " return p2\n", + "\n", + " js2 = sorted(path.parent.parent.glob(\"*.json\"))\n", + " if js2:\n", + " return js2[0]\n", + "\n", + " return None\n", + "\n", + "\n", + "def extract_recon_params_from_json(cfg: dict):\n", + " cam_len_m = None\n", + " pos = _get_nested(cfg, [\"experiment\", \"detector\", \"position\"], default=None)\n", + " if isinstance(pos, (list, tuple)) and len(pos) >= 3:\n", + " cam_len_m = pos[2]\n", + "\n", + " defocus = (\n", + " _get_nested(cfg, [\"experiment\", \"optics\", \"lens\", \"defocus\"], default=None)\n", + " or _get_nested(cfg, [\"experiment\", \"optics\", \"lens\", \"defocus_m\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"optics\", \"lens\", \"defocus\"], default=None)\n", + " )\n", + " defocus_m = None\n", + " if isinstance(defocus, (list, tuple)) and len(defocus) 
> 0:\n", + " defocus_m = defocus[0]\n", + " elif isinstance(defocus, (int, float)):\n", + " defocus_m = defocus\n", + "\n", + " lens_alpha_rad = (\n", + " _get_nested(cfg, [\"experiment\", \"optics\", \"lens\", \"alpha\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"optics\", \"lens\", \"alpha\"], default=None)\n", + " )\n", + "\n", + " probe_alpha = (\n", + " _get_nested(cfg, [\"process\", \"common\", \"probe\", \"alpha\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"probes\", \"alpha\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"reconstruction\", \"probe\", \"alpha\"], default=None)\n", + " )\n", + " object_alpha = (\n", + " _get_nested(cfg, [\"process\", \"common\", \"object\", \"alpha\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"reconstruction\", \"object\", \"alpha\"], default=None)\n", + " )\n", + "\n", + " slice_thickness_m = (\n", + " _get_nested(cfg, [\"process\", \"common\", \"object\", \"slice_thickness\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"object\", \"thickness\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"object\", \"dz\"], default=None)\n", + " )\n", + "\n", + " slice_number = (\n", + " _get_nested(cfg, [\"process\", \"common\", \"object\", \"slice_number\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"object\", \"nslices\"], default=None)\n", + " or _get_nested(cfg, [\"process\", \"common\", \"object\", \"n_slices\"], default=None)\n", + " )\n", + " if slice_number is None:\n", + " slices = _get_nested(cfg, [\"process\", \"common\", \"object\", \"slices\"], default=None)\n", + " if isinstance(slices, list):\n", + " slice_number = len(slices)\n", + "\n", + " energy = _get_nested(cfg, [\"process\", \"common\", \"source\", \"energy\"], default=None)\n", + " acc_v = None\n", + " if isinstance(energy, (list, tuple)) and len(energy) > 0:\n", + " acc_v = energy[0]\n", + " elif 
isinstance(energy, (int, float)):\n", + " acc_v = energy\n", + "\n", + " return {\n", + " \"acc_v\": acc_v,\n", + " \"camera_length_m\": cam_len_m,\n", + " \"defocus_m\": defocus_m,\n", + " \"lens_alpha_rad\": lens_alpha_rad,\n", + " \"probe_alpha\": probe_alpha,\n", + " \"object_alpha\": object_alpha,\n", + " \"slice_thickness_m\": slice_thickness_m,\n", + " \"slice_number\": slice_number,\n", + " }\n", + "\n", + "\n", + "def ensure_excel_log(excel_path: Path, sheet_name: str = \"ready_to_view\"):\n", + " headers = [\n", + " \"timestamp\",\n", + " \"message\",\n", + " \"recon_hdf_path\",\n", + " \"json_path\",\n", + " \"acc_v\",\n", + " \"camera_length_m\",\n", + " \"defocus_m\",\n", + " \"lens_alpha_rad\",\n", + " \"probe_alpha\",\n", + " \"object_alpha\",\n", + " \"slice_thickness_m\",\n", + " \"slice_number\",\n", + " ]\n", + "\n", + " excel_path.parent.mkdir(parents=True, exist_ok=True)\n", + "\n", + " if excel_path.exists():\n", + " wb = load_workbook(excel_path)\n", + " if sheet_name in wb.sheetnames:\n", + " ws = wb[sheet_name]\n", + " if ws.max_row == 0:\n", + " ws.append(headers)\n", + " else:\n", + " ws = wb.create_sheet(sheet_name)\n", + " ws.append(headers)\n", + " wb.save(excel_path)\n", + " return\n", + "\n", + " wb = Workbook()\n", + " ws = wb.active\n", + " ws.title = sheet_name\n", + " ws.append(headers)\n", + " wb.save(excel_path)\n", + "\n", + "\n", + "def append_ready_row_to_excel(\n", + " excel_path: Path,\n", + " message: str,\n", + " recon_hdf_path: Path,\n", + " json_path: Path | None,\n", + " params: dict,\n", + " sheet_name: str = \"ready_to_view\",\n", + "):\n", + " ensure_excel_log(excel_path, sheet_name=sheet_name)\n", + "\n", + " wb = load_workbook(excel_path)\n", + " ws = wb[sheet_name]\n", + "\n", + " row = [\n", + " datetime.now().isoformat(timespec=\"seconds\"),\n", + " message,\n", + " str(recon_hdf_path),\n", + " str(json_path) if json_path else \"\",\n", + " params.get(\"acc_v\"),\n", + " params.get(\"camera_length_m\"),\n", + 
def watch_ptyrex_hdf_ready(
    processed_root: Path,
    seen_hdfs: set,
    excel_log_path: Path,
    delay_sec: float = 600,
    verbose: bool = True,
    prefer_json_name: str = "ptycho.json",
):
    """Scan for new PtyREX reconstruction HDFs and log them as ready to view.

    Walks every ``pty_out`` directory under ``processed_root`` looking for
    ``*.hdf`` files not yet present in ``seen_hdfs`` (mutated in place so each
    file is handled exactly once).  For each new file, waits ``delay_sec``
    seconds, tries to read reconstruction parameters from a nearby JSON
    config, and appends a row to the Excel log.

    Returns the number of files logged during this pass.

    NOTE(review): ``time.sleep(delay_sec)`` runs inline, so every newly
    detected file stalls this scan (and the caller's watch loop) for
    ``delay_sec`` seconds — confirm that is acceptable for large batches.
    """
    if not processed_root.exists():
        return 0

    logged = 0

    for out_dir in processed_root.rglob("pty_out"):
        if not out_dir.is_dir():
            continue

        for recon_path in out_dir.rglob("*.hdf"):
            recon_path = canonical(recon_path)
            if recon_path in seen_hdfs:
                continue

            seen_hdfs.add(recon_path)
            folder_id = recon_path.parent.name

            if verbose:
                print(f"[watch] detected recon HDF (pty_out): {recon_path} (delaying {delay_sec}s)")

            # Give the reconstruction time to finish writing before logging.
            time.sleep(delay_sec)

            message = f"ptycho {folder_id} ready to view"
            json_path = find_json_near(recon_path, prefer_name=prefer_json_name)

            # Blank placeholders; replaced when a JSON config parses cleanly.
            params = {
                key: None
                for key in (
                    "acc_v",
                    "camera_length_m",
                    "defocus_m",
                    "lens_alpha_rad",
                    "probe_alpha",
                    "object_alpha",
                    "slice_thickness_m",
                    "slice_number",
                )
            }

            if json_path and json_path.exists():
                try:
                    with open(json_path, "r") as fh:
                        config = json.load(fh)
                    params = extract_recon_params_from_json(config)
                except Exception as err:
                    if verbose:
                        print(f"[watch] could not parse json {json_path}: {err}")
            elif verbose:
                print(f"[watch] no json found near recon: {recon_path}")

            try:
                append_ready_row_to_excel(
                    excel_path=excel_log_path,
                    message=message,
                    recon_hdf_path=recon_path,
                    json_path=json_path,
                    params=params,
                )
                if verbose:
                    print(f"[watch] logged to Excel: {excel_log_path}")
            except Exception as err:
                print(f"[watch] FAILED to write Excel log for {recon_path}: {err}")

            logged += 1

    return logged
###================================================================================================================================###
###                                              MAIN BODY                                                                         ###
###                                Loop, watcher, and submission execution                                                         ###
###================================================================================================================================###


def run_watch_loop(
    year: str,
    visit_id: str,
    experiment_name: str,
    template_json_path: str,
    iBF: bool,
    create_virtual_image: bool,
    DPC_check: bool,
    parallax_check: bool,
):
    """Main watch loop: monitor the visit directories and auto-submit jobs.

    On each pass this loop
      1. refreshes the sets of active SLURM job IDs (via SSH),
      2. submits PtyREX reconstructions when a mask + meta file appear,
      3. logs finished reconstructions (HDFs under ``pty_out``) to Excel,
      4. discovers new ``*_data.mib`` files and promotes them once stable
         (size unchanged for a window + minimum age), and
      5. submits MIB->HDF conversion batches, rate-limited to avoid
         overloading the cluster.

    Runs until interrupted with Ctrl-C.
    """
    MIB_SCRIPT_PATH = "/dls_sw/e02/software/epsic_tools/epsic_tools/mib2hdfConvert/MIB_convert_widget/scripts/MIB_convert_submit.py"
    PTYREX_BASH_SCRIPT = "/dls/science/groups/e02/Joshua/Wilson_BashScript/Auto_ptycho_bash.sh"

    SSH_HOST = "wilson"  # cluster login node used for sbatch/squeue calls
    SSH_USER = None

    ROOT_DIR = Path("/dls/e02/data") / year / visit_id / "Merlin" / experiment_name
    scripts_dir = ROOT_DIR / "scripts"
    scripts_dir.mkdir(parents=True, exist_ok=True)

    # NOTE(review): assumes exactly one "/Merlin/" component in the path —
    # confirm this holds for all visit layouts.
    PROCESSED_ROOT = Path(str(ROOT_DIR).replace("/Merlin/", "/processing/Merlin/"))

    excel_log_path = PROCESSED_ROOT / "ptyrex_ready_to_view_log.xlsx"
    ensure_excel_log(excel_log_path)

    print(f"\nExperiment directory:\n {ROOT_DIR}")
    print(f"Scripts directory:\n {scripts_dir}")
    print(f"Processed directory (watched):\n {PROCESSED_ROOT}")
    print(f"Excel log:\n {excel_log_path}")
    print(f"PtyREX template:\n {template_json_path}\n")

    print("Conversion options:")
    print(f"  iBF = {iBF}")
    print(f"  create_virtual_image = {create_virtual_image}")
    print(f"  DPC = {DPC_check}")
    print(f"  parallax = {parallax_check}\n")

    ptyout_submitted_for = set()
    seen_recon_hdfs = set()

    MAX_ACTIVE_MIB_JOBS = 4
    MAX_ACTIVE_PTYREX_JOBS = 6

    active_mib_job_ids: set[str] = set()
    active_ptyrex_job_ids: set[str] = set()

    seen_mibs: set[Path] = set()
    pending_mibs: list[Path] = []
    mib_candidates: dict[Path, float] = {}

    MIB_BATCH_SIZE = 4
    MIB_MIN_AGE_SEC = 10.0
    MIB_STABLE_WINDOW_SEC = 5.0

    # BUG FIX: these banners previously hard-coded "max 4" for both queues,
    # but MAX_ACTIVE_PTYREX_JOBS is 6 — derive the text from the constants
    # so the messages cannot drift out of sync again.
    print(f"Watching for new *_data.mib files (queued, max {MAX_ACTIVE_MIB_JOBS} active conversion jobs)...")
    print(" - will only enqueue MIBs once they are stable (size not changing + old enough)")
    print(f"Watching for mask.png + meta file to create JSON and submit PtyREX (queued, max {MAX_ACTIVE_PTYREX_JOBS} active PtyREX jobs)...")
    print("Watching for recon *.hdf (ONLY under pty_out) to log 'ready to view' into Excel...\n")

    # NOTE(review): each pass re-walks the directory trees with rglob; a
    # 1-second poll may be expensive on large visits — confirm acceptable.
    POLL_INTERVAL = 1

    while True:
        try:
            # 1) Refresh active job lists (drop jobs SLURM no longer reports)
            for jid in list(active_mib_job_ids):
                if not is_slurm_job_active(jid, host=SSH_HOST, user=SSH_USER):
                    active_mib_job_ids.discard(jid)

            for jid in list(active_ptyrex_job_ids):
                if not is_slurm_job_active(jid, host=SSH_HOST, user=SSH_USER):
                    active_ptyrex_job_ids.discard(jid)

            # 2) PtyREX submissions, capped by the active-job limit
            available_ptyrex = MAX_ACTIVE_PTYREX_JOBS - len(active_ptyrex_job_ids)
            if available_ptyrex > 0:
                new_job_ids = ensure_pty_out_when_mask_exists(
                    processed_root=PROCESSED_ROOT,
                    done=ptyout_submitted_for,
                    template_json_path=template_json_path,
                    ptyrex_bash_script_path=PTYREX_BASH_SCRIPT,
                    ssh_host=SSH_HOST,
                    ssh_user=SSH_USER,
                    factor=1.7,
                    overwrite_json=False,
                    config_name="ptycho",
                    verbose=True,
                    max_submissions_per_pass=min(available_ptyrex, 4),
                )
                for jid in new_job_ids:
                    if jid:
                        active_ptyrex_job_ids.add(jid)

                if new_job_ids:
                    print(f"[watch] PtyREX submitted {len(new_job_ids)} job(s). "
                          f"active {len(active_ptyrex_job_ids)}/{MAX_ACTIVE_PTYREX_JOBS}")

            # 3) Recon logging (short delay here; default 600 s is for
            # standalone use)
            watch_ptyrex_hdf_ready(
                processed_root=PROCESSED_ROOT,
                seen_hdfs=seen_recon_hdfs,
                excel_log_path=excel_log_path,
                delay_sec=10,
                verbose=True,
                prefer_json_name="ptycho.json",
            )

            # 4) Discover new MIBs; record first-seen time per candidate
            for mib in sorted(ROOT_DIR.rglob("*_data.mib")):
                mib = canonical(mib)
                if mib in seen_mibs:
                    continue
                mib_candidates.setdefault(mib, time.time())

            # Promote candidates only once their size has stopped changing
            promote_list = []
            for mib in list(mib_candidates.keys()):
                if not mib.exists():
                    mib_candidates.pop(mib, None)
                    continue

                if is_file_stable(mib, min_age_sec=MIB_MIN_AGE_SEC, stable_window_sec=MIB_STABLE_WINDOW_SEC):
                    promote_list.append(mib)
                    mib_candidates.pop(mib, None)

            for mib in promote_list:
                seen_mibs.add(mib)
                pending_mibs.append(mib)
                print(f"[watch] MIB is stable; queued for conversion: {mib}")

            # 5) Submit MIB conversion jobs up to the available slots
            available_mib = MAX_ACTIVE_MIB_JOBS - len(active_mib_job_ids)
            if available_mib > 0 and pending_mibs:
                for _ in range(available_mib):
                    if not pending_mibs:
                        break

                    batch = pending_mibs[:MIB_BATCH_SIZE]
                    pending_mibs = pending_mibs[MIB_BATCH_SIZE:]

                    ts = time.time_ns()  # nanosecond stamp avoids collisions
                    temp_convert_info = scripts_dir / f"convert_info_{ts}.txt"

                    write_convert_info_from_params(
                        to_convert_paths=batch,
                        info_path=temp_convert_info,
                        reshaping="Auto_reshape",
                        Scan_X=0,
                        Scan_Y=0,
                        bin_sig_widget=2,
                        bin_nav_widget=2,
                        create_virtual_image=create_virtual_image,
                        mask_path="",
                        disk_lower_thresh=0.01,
                        disk_upper_thresh=0.15,
                        DPC_check=DPC_check,
                        parallax_check=parallax_check,
                        iBF=iBF,
                    )

                    temp_cluster_submit = scripts_dir / f"cluster_submit_{ts}.sh"
                    create_cluster_submit(
                        convert_info_path=temp_convert_info,
                        array_max_index=len(batch) - 1,
                        submit_path=temp_cluster_submit,
                        scripts_dir=scripts_dir,
                        MIB_SCRIPT_PATH=MIB_SCRIPT_PATH,
                    )

                    print(f"[watch] submitting MIB conversion batch: {len(batch)} file(s). "
                          f"active {len(active_mib_job_ids)}/{MAX_ACTIVE_MIB_JOBS} "
                          f"(queue remaining: {len(pending_mibs)})")

                    success, job_id, message = submit_job_via_ssh(
                        str(temp_cluster_submit), SSH_HOST, SSH_USER
                    )
                    if success and job_id:
                        active_mib_job_ids.add(job_id)
                        print(f"[watch] MIB conversion submitted: job {job_id}\n")
                    else:
                        print(f"[watch] MIB conversion submission failed: {message}\n")
                        # Requeue the failed batch and stop submitting this pass.
                        pending_mibs = batch + pending_mibs
                        break

            time.sleep(POLL_INTERVAL)

        except KeyboardInterrupt:
            print("Stopping watch loop.")
            break


# =========================================================
# NOTEBOOK UI:
#   1) tickboxes (images)
#   2) template selection
#   3) session details
#   4) start watchers
# =========================================================
def start_notebook_ui():
    """Build the ipywidgets front-end for the watch loop.

    Falls back to the CLI flow when ipywidgets is unavailable.  The Start
    button validates the session fields and then runs ``run_watch_loop``
    (which blocks until interrupted).
    """
    try:
        from ipywidgets import Checkbox, VBox, Button, HTML, Dropdown, Text
        from IPython.display import display
    except Exception as e:
        print(f"ipywidgets not available ({e}); falling back to CLI prompts.")
        return start_cli_flow()

    # 1) "Select images" tickboxes
    cb_iBF = Checkbox(value=True, description="iBF")
    cb_virtual = Checkbox(value=True, description="Create virtual image")
    cb_DPC = Checkbox(value=True, description="DPC")
    cb_parallax = Checkbox(value=True, description="Parallax")

    # 2) Template selection
    template_dd = Dropdown(
        options=list(TEMPLATE_MAP.keys()),
        value="300 KeV" if "300 KeV" in TEMPLATE_MAP else list(TEMPLATE_MAP.keys())[0],
        description="Template:",
    )

    # 3) Session details
    year_txt = Text(value="", description="Year:", placeholder="e.g. 2026")
    visit_txt = Text(value="", description="Visit:", placeholder="e.g. mg12345-1")
    exp_txt = Text(value="", description="Experiment:", placeholder="experiment folder name")

    # 4) Start
    run_button = Button(description="Start auto processing", button_style="success")
    status = HTML(value="Step 1: Select tick boxes → template → session details, then Start.")

    # NOTE(review): the HTML tags below (<b>, <hr>, <br>) were stripped by
    # source extraction; reconstructed as the most plausible markup — verify
    # against the rendered notebook.
    ui = VBox([
        status,
        HTML(value="<b>1) Select images/options</b>"),
        cb_iBF, cb_virtual, cb_DPC, cb_parallax,
        HTML(value="<b>2) Select PtyREX template</b>"),
        template_dd,
        HTML(value="<b>3) Session details</b>"),
        year_txt, visit_txt, exp_txt,
        HTML(value="<hr>"),
        run_button,
    ])
    display(ui)

    def _on_run_clicked(_):
        # Validate inputs, echo the chosen configuration, then start the
        # (blocking) watch loop.
        year = year_txt.value.strip()
        visit = visit_txt.value.strip()
        exp = exp_txt.value.strip()
        template_json_path = TEMPLATE_MAP.get(template_dd.value, "")

        if not year or not visit or not exp:
            status.value = "Please fill Year, Visit, and Experiment."
            return

        if not template_json_path:
            status.value = "Template selection is invalid."
            return

        status.value = (
            "Starting watchers with:<br>"
            f"iBF={cb_iBF.value}, create_virtual_image={cb_virtual.value}, "
            f"DPC={cb_DPC.value}, parallax={cb_parallax.value}<br>"
            f"template={template_dd.value}<br>"
            f"year={year}, visit={visit}, experiment={exp}"
        )

        run_watch_loop(
            year=year,
            visit_id=visit,
            experiment_name=exp,
            template_json_path=template_json_path,
            iBF=cb_iBF.value,
            create_virtual_image=cb_virtual.value,
            DPC_check=cb_DPC.value,
            parallax_check=cb_parallax.value,
        )

    run_button.on_click(_on_run_clicked)


# =========================================================
# CLI flow:
#   1) tickboxes (y/n)
#   2) template selection
#   3) session details
#   4) start watchers
# =========================================================
def start_cli_flow():
    """Terminal equivalent of the notebook UI: prompt, then run the loop."""
    print("\nStep 1) Select images/options")
    iBF = ask_bool_cli("Enable iBF?", default=True)
    create_virtual_image = ask_bool_cli("Create virtual image?", default=True)
    DPC_check = ask_bool_cli("Enable DPC?", default=True)
    parallax_check = ask_bool_cli("Enable parallax?", default=True)

    print("\nStep 2) Select PtyREX template")
    template_json_path = select_template_cli(TEMPLATE_MAP)

    print("\nStep 3) Session details")
    year = input("Enter year (yyyy): ").strip()
    visit_id = input("Enter visit ID (e.g. mg12345-1): ").strip()
    experiment_name = input("Enter experiment folder name: ").strip()

    print("\nStep 4) Starting watchers...\n")
    run_watch_loop(
        year=year,
        visit_id=visit_id,
        experiment_name=experiment_name,
        template_json_path=template_json_path,
        iBF=iBF,
        create_virtual_image=create_virtual_image,
        DPC_check=DPC_check,
        parallax_check=parallax_check,
    )


if __name__ == "__main__":
    # Route to the widget UI inside a Jupyter kernel, CLI prompts otherwise.
    if _in_notebook():
        start_notebook_ui()
    else:
        start_cli_flow()