From c27ce1dc4d0fd9a34b2ae8d367ef1959ae1bb063 Mon Sep 17 00:00:00 2001 From: Henry McNamara <160671431+hmac213@users.noreply.github.com> Date: Mon, 21 Jul 2025 13:07:11 -0700 Subject: [PATCH 01/12] FIX deprecated ndarray.tostring() -> tobytes() (#571) --- cortex/dataset/braindata.py | 2 +- cortex/formats.pyx | 2 +- cortex/freesurfer.py | 6 +++--- cortex/webgl/serve.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/cortex/dataset/braindata.py b/cortex/dataset/braindata.py index 6be206e7..cf0f8f19 100644 --- a/cortex/dataset/braindata.py +++ b/cortex/dataset/braindata.py @@ -628,7 +628,7 @@ def __getitem__(self, masktype): def _hash(array): '''A simple numpy hash function''' - return hashlib.sha1(array.tostring()).hexdigest() + return hashlib.sha1(array.tobytes()).hexdigest() def _hdf_write(h5, data, name="data", group="/data"): try: diff --git a/cortex/formats.pyx b/cortex/formats.pyx index 7b9951a2..bc254194 100644 --- a/cortex/formats.pyx +++ b/cortex/formats.pyx @@ -184,7 +184,7 @@ def write_stl(filename, object pts, object polys): data['f1'] = pts[polys].reshape(-1, 9) with open(filename, 'wb') as fp: fp.write(struct.pack('80xI', len(polys))) - fp.write(data.tostring()) + fp.write(data.tobytes()) diff --git a/cortex/freesurfer.py b/cortex/freesurfer.py index 2349b27d..4d21d901 100644 --- a/cortex/freesurfer.py +++ b/cortex/freesurfer.py @@ -422,8 +422,8 @@ def write_surf(filename, pts, polys, comment=''): fp.write(b'\xff\xff\xfe') fp.write((comment+'\n\n').encode()) fp.write(struct.pack('>2I', len(pts), len(polys))) - fp.write(pts.astype(np.float32).byteswap().tostring()) - fp.write(polys.astype(np.uint32).byteswap().tostring()) + fp.write(pts.astype(np.float32).byteswap().tobytes()) + fp.write(polys.astype(np.uint32).byteswap().tobytes()) fp.write(b'\n') @@ -961,7 +961,7 @@ def write_decimated(path, pts, polys): with open(path+'.full.patch.3d', 'w') as fp: fp.write(struct.pack('>i', -1)) fp.write(struct.pack('>i', len(dpts))) - fp.write(data.tostring()) + fp.write(data.tobytes()) class SpringLayout(object): diff --git a/cortex/webgl/serve.py b/cortex/webgl/serve.py index 2499ded6..067f70d3 100644 --- a/cortex/webgl/serve.py +++ b/cortex/webgl/serve.py @@ -47,7 +47,7 @@ def default(self, obj): __class__="NParray", dtype=obj.dtype.descr[0][1], shape=obj.shape, - data=binascii.b2a_base64(obj.tostring()).decode('utf-8')) + data=binascii.b2a_base64(obj.tobytes()).decode('utf-8')) elif isinstance(obj, (np.int64, np.int32, np.int16, np.int8, np.uint64, np.uint32, np.uint16, np.uint8)): return int(obj) From ae36f17dec0e80c9a141baa4828a428fd32114b6 Mon Sep 17 00:00:00 2001 From: Tomas Knapen Date: Mon, 21 Jul 2025 22:49:25 +0200 Subject: [PATCH 02/12] ENH add new colormaps from Knapen Lab (#574) Some of these are quite nice to have when doing retinotopy. 
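Note on using the new maps: a PNG placed in filestore/colormaps should become
selectable simply by its file name. A minimal, hypothetical sketch of how the
2D "_alpha" maps are typically applied (the subject "S1", the transform
"fullhead", and the random arrays below are placeholders, not part of this
change):

    import numpy as np
    import cortex

    # shapes are arbitrary here; real data must match the transform's reference volume
    ang = np.random.rand(31, 100, 100)  # e.g. polar angle, mapped to color
    ecc = np.random.rand(31, 100, 100)  # e.g. eccentricity, mapped to alpha
    vol = cortex.Volume2D(ang, ecc, "S1", "fullhead",
                          cmap="Retinotopy_HSV_2x_alpha",
                          vmin=0, vmax=1, vmin2=0, vmax2=1)
    cortex.quickshow(vol)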
--- filestore/colormaps/HCP_MMP1.png | Bin 0 -> 728 bytes filestore/colormaps/Retinotopy_HSV_2x_alpha.png | Bin 0 -> 718 bytes filestore/colormaps/Retinotopy_HSV_alpha.png | Bin 0 -> 706 bytes filestore/colormaps/custom2D_RB_bins_256.png | Bin 0 -> 834 bytes filestore/colormaps/eccentricity_alpha_2D.png | Bin 0 -> 900 bytes filestore/colormaps/nipy_spectral_alpha.png | Bin 0 -> 802 bytes filestore/colormaps/seismic_alpha.png | Bin 0 -> 696 bytes filestore/colormaps/spectral_alpha.png | Bin 0 -> 876 bytes 8 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 filestore/colormaps/HCP_MMP1.png create mode 100644 filestore/colormaps/Retinotopy_HSV_2x_alpha.png create mode 100644 filestore/colormaps/Retinotopy_HSV_alpha.png create mode 100644 filestore/colormaps/custom2D_RB_bins_256.png create mode 100644 filestore/colormaps/eccentricity_alpha_2D.png create mode 100644 filestore/colormaps/nipy_spectral_alpha.png create mode 100644 filestore/colormaps/seismic_alpha.png create mode 100644 filestore/colormaps/spectral_alpha.png diff --git a/filestore/colormaps/HCP_MMP1.png b/filestore/colormaps/HCP_MMP1.png new file mode 100644 index 0000000000000000000000000000000000000000..442031efac2b1a9b59c585414cb482f2b8138294 GIT binary patch literal 728 zcmV;}0w?{6P)*mxQbzRrhRd@ace}KMc$Qa?_VbnSw zLa7^Z-MmYdC}<{0Q`_`;5TJNVG@mAu>>8m`nbE|YNY#a-G5>}*ao zWA)j|Ps`@h=Z}1IbrOe|qDxiusGnoM^`200gyrc0>^d)A(GcnOshHkQ!Qr=}F3jPL zCrJ2yGnvJEaoPH)|Ip6j2rbP&zGFU;M&zK1C%QIf<`g1?5~$jI41;1JKe3`0pXH~d zlu3_^bHi>b6(Ss}JoK3<{BC{3M1+ixSWB0yhhLsPwpe!3+u6mWJj9%3t0*~I%!xCv zF|=Q2*7|(9hXlf1^GPm9A^O)l8ozudXxxZ-egnA~n>bxu&O_@n6fza=n1e(+{rEB}4OsD!%JjR9nXlx?<9T(VEXyMM+00TcNP^tuGY7($4OXHpH6-(TC)O7Y@ zSUH=AwRK!{{~>)?!h`5O%7a>T>S#i>o8+4V2($xhx@a>l;I zcD_-j?UTj#3^Jp`%h&u`=XM}oh(UnC0b~yg)fg-e)HYE7=r%^Gx)J1Q2cVy+=WA#% xGJ@Pn?EpQ%qrlJrjDH3W25K>=WzwHdte;gncxH3>@&MB*gQu&X%Q~loCIEw%R1^RJ literal 0 HcmV?d00001 diff --git a/filestore/colormaps/Retinotopy_HSV_alpha.png b/filestore/colormaps/Retinotopy_HSV_alpha.png new file mode 100644 index 0000000000000000000000000000000000000000..33cac277dd3bc5afeb13d885e4a265870a6a8f9f GIT binary patch literal 706 zcmeAS@N?(olHy`uVBq!ia0y~yU<5K5893O0R7}x|GzJEyPEQxdkcwMxuNd+*CVK=1o$7pHvD zbzAG1)(|hmAi&@NGMj~J3>F7!n3i#C1CI^uT%tw z)xi!{$7TlsP_P~3V*)aP9))=x7*MLRz+n3@IhvzPfWu9PLkbj%1rIGa&Lue-Bsl`L z`F(c68KkgqOgPfC02Y1wJ=@`Iu$zI-`LRHtsXN>mXdp5I8g7TKn3#A+^41Q<1N-(f U1*-3S2u$w`p00i_>zopr04eW z_}x?aZos$wtIuhzV!p_ytL^vPmR-PO;uM{V0`2+xzkiwdMK|-dQD1V-@~>r4`N}Kb z{*BT-e`QmB{QS?WboXaHS?p$aYg%;gomUdoym7x z{O9%UYp&0qUTJ$Q_usQ=(|(uB-oG{J%dN}W#q++X?A2O3Y2K{r;yKy=$yaN>NkoVK zlDYk9-Er%4)!%-+KJ!^7MT-e}DX$n*ZnYy`wvq|9tspN0j}wZ>jTi&!<2CTNi&n zZvE|#+nN6>GXevKg+YM90Te=1VK@Mdp^8Ds4q_CbsvE&Zvp7)E*HFg_0BI_PBitMZ shCjRt3=IqnObi?h)M8M}q&=U-ZQP$*+4W3z2IejXPgg&ebxsLQ0NqN*F8}}l literal 0 HcmV?d00001 diff --git a/filestore/colormaps/nipy_spectral_alpha.png b/filestore/colormaps/nipy_spectral_alpha.png new file mode 100644 index 0000000000000000000000000000000000000000..92ecd8891296426699e4bea65cd45912db65a204 GIT binary patch literal 802 zcmeAS@N?(olHy`uVBq!ia0y~yU<5K5893O0R7}x|GzJEym!2+;Ar-gYUfG>@$U(&Q zqVcw+4I732{kKk@bf{lMZfVak5tB&n*q@jJP=aQ(~4JkQ_FN;&f}=jL>k*h%~1F5jE9N#?q4u66d_wYBeyH`(^3pFa0p zc>hz=-^wSeft-1hc%qHoe3dHOubOJRF8fa8ydCF@x7W}8b-n(3-1^e=S(ASMy~6wT z_W^qq1_g!&plca8sK(%Eptgw$K({eb)r}xeHvs)iJzqnEkqP8hst4$G8hD;MMjnW- aW5~~%T=-(eg)6|c&*16m=d#Wzp$Pz0LzGMa literal 0 HcmV?d00001 diff --git a/filestore/colormaps/seismic_alpha.png b/filestore/colormaps/seismic_alpha.png new file mode 100644 index 
0000000000000000000000000000000000000000..57951e40c8e6ff9a17958fb1bbbc890c092930be GIT binary patch literal 696 zcmeAS@N?(olHy`uVBq!ia0y~yU<5K5893O0R7}x|GzJEydQTU}kcwMxuWS@#Ruo`4 z*r;yvRQ{~^iD`>jUaX6He#SWM%JVCi-=}TAb8N=it6OKkt-7DR{e0&2tYCU9bQ=d%-3azHlL8fe4Gl&PkXxzjeFcVlR5p=%jyzx=zoPU= U&!*Je{|rFj>FVdQ&MBb@0C?hDF8}}l literal 0 HcmV?d00001 diff --git a/filestore/colormaps/spectral_alpha.png b/filestore/colormaps/spectral_alpha.png new file mode 100644 index 0000000000000000000000000000000000000000..4058f2f3d34ff85735a9f43f07e6730d217a11fd GIT binary patch literal 876 zcmeAS@N?(olHy`uVBq!ia0y~yU<5K5893O0R7}x|GzJD{V^0^ykcwMxuXq>THV|;V zc>U}e9qzM!oGA`ke`ZXxId2)NwZU=1B-<*jRVXaCOXuReb7>&#a_pT<77^j^Aa)87qVx>HMHzh%$w-|MwY z{(Ri|J@>Znt^U8bz9jbRmj4fT-m-uGdil<{`y2PzfBzDrT72$I&zUo4Dx;0}-TeLK zOsTAKWP0kElryJxt^fb6c((lczi-Z%$zT8ev+Dh~J3s#P8EPf3&8@A~E7tq`>)V^$ zd+UwUs?J>5wbnK*>&%y0ySH}Tq~sVQtf^f?(g7!(*97#NtS#$ZyQwuu}J z91YZUBgoSVKtEH@*N|Xr0EQy<1N6at8aR|XMjo)PXEa_do-HZ7XALkHF?hQAxvX Date: Mon, 21 Jul 2025 16:20:46 -0500 Subject: [PATCH 03/12] NF: Clipping sliceplanes (#556) * working clipping * cleaned up shaders * added comments --------- Co-authored-by: Alexander Huth Co-authored-by: Alexander Huth --- cortex/anat.py | 2 +- cortex/webgl/resources/js/dataset.js | 12 +++++++ cortex/webgl/resources/js/mriview.js | 48 +++++++++++++++++++++++++ cortex/webgl/resources/js/shaderlib.js | 29 +++++++++++++++ cortex/webgl/resources/js/sliceplane.js | 37 ++++++++++++++++++- 5 files changed, 126 insertions(+), 2 deletions(-) diff --git a/cortex/anat.py b/cortex/anat.py index e2983fa0..7d747927 100644 --- a/cortex/anat.py +++ b/cortex/anat.py @@ -73,7 +73,7 @@ def voxelize(outfile, subject, surf='wm', mp=True): import nibabel from . import polyutils nib = db.get_anat(subject, "raw") - shape = nib.get_shape() + shape = nib.shape vox = np.zeros(shape, dtype=bool) for pts, polys in db.get_surf(subject, surf, nudge=False): xfm = Transform(np.linalg.inv(nib.affine), nib) diff --git a/cortex/webgl/resources/js/dataset.js b/cortex/webgl/resources/js/dataset.js index dd156b90..c44263ec 100644 --- a/cortex/webgl/resources/js/dataset.js +++ b/cortex/webgl/resources/js/dataset.js @@ -83,6 +83,18 @@ var dataset = (function(module) { this.uniforms = { framemix: { type:'f', value:0}, dataAlpha: { type:'f', value:1.0}, + + slicexn: { type:'v3', value:new THREE.Vector3( 0,0,0 )}, + sliceyn: { type:'v3', value:new THREE.Vector3( 0,0,0 )}, + slicezn: { type:'v3', value:new THREE.Vector3( 0,0,0 )}, + + slicexc: { type:'v3', value:new THREE.Vector3( 0,0,0 )}, + sliceyc: { type:'v3', value:new THREE.Vector3( 0,0,0 )}, + slicezc: { type:'v3', value:new THREE.Vector3( 0,0,0 )}, + + doslicex: { type:'i', value:false}, + doslicey: { type:'i', value:false}, + doslicez: { type:'i', value:false}, } if (!this.vertex) { diff --git a/cortex/webgl/resources/js/mriview.js b/cortex/webgl/resources/js/mriview.js index 75e5baa6..ff536df6 100644 --- a/cortex/webgl/resources/js/mriview.js +++ b/cortex/webgl/resources/js/mriview.js @@ -49,6 +49,9 @@ var mriview = (function(module) { y: new sliceplane.Plane(this, 1), z: new sliceplane.Plane(this, 2), }; + this._clipx = false; + this._clipy = false; + this._clipz = false; this.ui = new jsplot.Menu(); this.ui.addEventListener("update", this.schedule.bind(this)); @@ -1164,6 +1167,18 @@ var mriview = (function(module) { rotate_z: {action:[this.sliceplanes.z, 'setAngle', -89, 89]} }); + var sliceplane_clip = sliceplane_ui.addFolder("clip", true); + sliceplane_clip.add({ + clip_x: {action:[this, "setClippingX"]}, 
+ flip_x: {action:[this.sliceplanes.x, "setFlip"]}, + clip_y: {action:[this, "setClippingY"]}, + flip_y: {action:[this.sliceplanes.y, "setFlip"]}, + clip_z: {action:[this, "setClippingZ"]}, + flip_z: {action:[this.sliceplanes.z, "setFlip"]}, + }) + + // + if ($(this.object).find("#colormap_category").length > 0) { $(this.object).find("#colormap").ddslick({ width:296, height:350, onSelected: function() { @@ -1294,6 +1309,39 @@ var mriview = (function(module) { this.sliceplanes.z.setVisible(!this.sliceplanes.z._visible); viewer.schedule(); }; + module.Viewer.prototype.setClippingX = function(val) { + if (val === undefined) + return this._clipx; + + this._clipx = val; + + if (this.active !== undefined) { + this.active.uniforms.doslicex.value = this._clipx; + } + this.schedule(); + } + module.Viewer.prototype.setClippingY = function(val) { + if (val === undefined) + return this._clipy; + + this._clipy = val; + + if (this.active !== undefined) { + this.active.uniforms.doslicey.value = this._clipy; + } + this.schedule(); + } + module.Viewer.prototype.setClippingZ = function(val) { + if (val === undefined) + return this._clipz; + + this._clipz = val; + + if (this.active !== undefined) { + this.active.uniforms.doslicez.value = this._clipz; + } + this.schedule(); + } return module; }(mriview || {})); diff --git a/cortex/webgl/resources/js/shaderlib.js b/cortex/webgl/resources/js/shaderlib.js index aa8904f9..9eaeb73a 100644 --- a/cortex/webgl/resources/js/shaderlib.js +++ b/cortex/webgl/resources/js/shaderlib.js @@ -379,6 +379,7 @@ var Shaderlib = (function() { "varying float vCurv;", "varying float vMedial;", "varying float vThickmix;", + "varying vec3 vWorldPosition;", // "varying float vDrop;", "varying vec3 vPos_x[2];", @@ -444,6 +445,7 @@ var Shaderlib = (function() { "gl_Position = projectionMatrix * modelViewMatrix * vec4( pos, 1.0 );", + "vWorldPosition = pos;", "}" ].join("\n"); @@ -477,6 +479,18 @@ var Shaderlib = (function() { "uniform vec2 dshape[2];", "uniform sampler2D data[4];", + "uniform vec3 slicexn;", // normal vector for the x sliceplane + "uniform vec3 sliceyn;", + "uniform vec3 slicezn;", + + "uniform vec3 slicexc;", // centerpoint of the x sliceplane + "uniform vec3 sliceyc;", + "uniform vec3 slicezc;", + + "uniform bool doslicex;", // should we clip the surface on one side of the x sliceplane? + "uniform bool doslicey;", + "uniform bool doslicez;", + // "uniform float hatchAlpha;", // "uniform vec3 hatchColor;", // "uniform sampler2D hatch;", @@ -489,6 +503,7 @@ var Shaderlib = (function() { "varying float vCurv;", "varying float vMedial;", "varying float vThickmix;", + "varying vec3 vWorldPosition;", // the x,y,z coordinates of this pixel utils.standard_frag_vars, utils.rand, @@ -498,9 +513,23 @@ var Shaderlib = (function() { utils.samplers, "void main() {", + //Sliceplane Clipping + "bool clipx = dot(vWorldPosition - slicexc, slicexn) > 0.0;", // is this pixel on the wrong side of the x sliceplane? 
+ "bool clipy = dot(vWorldPosition - sliceyc, sliceyn) > 0.0;", + "bool clipz = dot(vWorldPosition - slicezc, slicezn) > 0.0;", + + "if (clipx && doslicex && !doslicey && !doslicez) discard;", // clip only in x + "if (clipy && !doslicex && doslicey && !doslicez) discard;", // clip only in y + "if (clipz && !doslicex && !doslicey && doslicez) discard;", // clip only in z + "if (clipx && clipy && doslicex && doslicey && !doslicez) discard;", // clip in x and y + "if (clipx && clipz && doslicex && !doslicey && doslicez) discard;", // clip in x and z + "if (clipy && clipz && !doslicex && doslicey && doslicez) discard;", // clip in y and z + "if (clipx && clipy && clipz && doslicex && doslicey && doslicez) discard;", // clip in x, y, and z + //Curvature Underlay "float ctmp = clamp(vCurv / smoothness, -0.5, 0.5);", // use limits here too "float curv = clamp(ctmp * contrast + brightness, 0.0, 1.0);", + "vec4 cColor = vec4(vec3(curv), 1.0);", "vec3 coord_x, coord_y;", diff --git a/cortex/webgl/resources/js/sliceplane.js b/cortex/webgl/resources/js/sliceplane.js index d18af74c..aba5a06b 100644 --- a/cortex/webgl/resources/js/sliceplane.js +++ b/cortex/webgl/resources/js/sliceplane.js @@ -56,6 +56,8 @@ var sliceplane = (function(module) { this.mesh = new THREE.Mesh(this.geometry, this.shader); this.mesh.doubleSided = true; + this.flip_clip = 1; // 1 or -1 + this.object.add(this.mesh); //this.scene.add(this.mesh); this.scene.add(this.object); @@ -127,6 +129,9 @@ var sliceplane = (function(module) { imat.multiplyVector3(this.geometry.vertices[2].set(shape[0]-0.5,-0.5,slice)); imat.multiplyVector3(this.geometry.vertices[3].set(shape[0]-0.5,shape[1]-0.5,slice)); } + this.center = new THREE.Vector3().add(this.geometry.vertices[0]).add(this.geometry.vertices[1]).add(this.geometry.vertices[2]).add(this.geometry.vertices[3]).divideScalar(4); + + this.updateClipping(); this.geometry.computeBoundingSphere(); var center = this.geometry.boundingSphere.center; @@ -184,6 +189,8 @@ var sliceplane = (function(module) { this.mesh.rotation.set(0,0,0); this.mesh.rotateOnAxis(axis, angle / 180 * Math.PI); + + this.updateClipping(); } module.Plane.prototype.setVisible = function(val) { if (val === undefined) @@ -191,8 +198,36 @@ var sliceplane = (function(module) { this._visible = val; - if (this.mesh !== undefined) + if (this.mesh !== undefined) { this.mesh.visible = this._visible; + this.updateClipping(); + } + + } + module.Plane.prototype.updateClipping = function() { + this.normal = this.geometry.faces[0].normal.clone().applyEuler(this.mesh.rotation).multiplyScalar(this.flip_clip); + + if (this.dir == 0){ + this.viewer.active.uniforms.slicexn.value.copy(this.normal); + this.viewer.active.uniforms.slicexc.value.copy(this.center); + } else if (this.dir == 1){ + this.viewer.active.uniforms.sliceyn.value.copy(this.normal); + this.viewer.active.uniforms.sliceyc.value.copy(this.center); + } else if (this.dir == 2){ + this.viewer.active.uniforms.slicezn.value.copy(this.normal); + this.viewer.active.uniforms.slicezc.value.copy(this.center); + } + } + module.Plane.prototype.setFlip = function(val) { + if (val === undefined) { + return this.flip_clip == -1; + } + if (val) { + this.flip_clip = -1; + } else { + this.flip_clip = 1; + } + this.updateClipping(); } module.MIP = function(viewer) { From 77dcc6db3f482b80f08e30c78974bbe3265d7f09 Mon Sep 17 00:00:00 2001 From: Mark Lescroart Date: Thu, 7 Aug 2025 11:18:23 -0700 Subject: [PATCH 04/12] FIX: removed deprecated np.float_ commands in cx.xfm, replaced with np.float64 (#575) 
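Note: np.float_ was only an alias for np.float64 and is removed in NumPy 2.0,
so the swap is one-for-one. A tiny illustration of the pattern used throughout
cortex/xfm.py, with a made-up 4x4 transform written as text (not data from the
repo):

    import numpy as np

    lines = ["1 0 0 10", "0 1 0 20", "0 0 1 30", "0 0 0 1"]
    xfm = np.array([[np.float64(s) for s in ln.split() if s] for ln in lines])
    assert xfm.shape == (4, 4)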
--- cortex/xfm.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cortex/xfm.py b/cortex/xfm.py index cbdae9bd..d74bfe5c 100644 --- a/cortex/xfm.py +++ b/cortex/xfm.py @@ -107,7 +107,7 @@ def from_fsl(cls, xfm, func_nii, anat_nii): if isinstance(xfm, str): with open(xfm, 'r') as fid: L = fid.readlines() - xfm = np.array([[np.float_(s) for s in ll.split() if s] for ll in L]) + xfm = np.array([[np.float64(s) for s in ll.split() if s] for ll in L]) # Internally, pycortex computes the OPPOSITE transform: from anatomical volume to functional volume. # Thus, assign anat to "infile" (starting point for transform) @@ -246,7 +246,7 @@ def from_freesurfer(cls, fs_register, func_nii, subject, freesurfer_subject_dir= if isinstance(fs_register, str): with open(fs_register, 'r') as fid: L = fid.readlines() - anat2func = np.array([[np.float_(s) for s in ll.split() if s] for ll in L[4:8]]) + anat2func = np.array([[np.float64(s) for s in ll.split() if s] for ll in L[4:8]]) else: anat2func = fs_register @@ -261,7 +261,7 @@ def from_freesurfer(cls, fs_register, func_nii, subject, freesurfer_subject_dir= try: cmd = ('mri_info', '--vox2ras', anat_mgz) L = decode(subprocess.check_output(cmd)).splitlines() - anat_vox2ras = np.array([[np.float_(s) for s in ll.split() if s] for ll in L]) + anat_vox2ras = np.array([[np.float64(s) for s in ll.split() if s] for ll in L]) except OSError: print ("Error occurred while executing:\n{}".format(' '.join(cmd))) raise @@ -379,7 +379,7 @@ def _vox2ras_tkr(image): # unpredictable. L = L[-4:] tkrvox2ras = np.array( - [[np.float_(s) for s in ll.split() if s] for ll in L]) + [[np.float64(s) for s in ll.split() if s] for ll in L]) except OSError as e: print("Error occurred while executing:\n{}".format(' '.join(cmd))) raise e From b82ddd5e2ffb8dddd501af8ef517e9bf7400633b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 06:16:15 -0700 Subject: [PATCH 05/12] Bump actions/checkout from 4 to 5 (#576) Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build_docs.yml | 2 +- .github/workflows/codespell.yml | 2 +- .github/workflows/install_from_wheel.yml | 2 +- .github/workflows/run_tests.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build_docs.yml b/.github/workflows/build_docs.yml index 32d20b74..a7d9e94f 100644 --- a/.github/workflows/build_docs.yml +++ b/.github/workflows/build_docs.yml @@ -14,7 +14,7 @@ jobs: build-docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v5 diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index 22cdd283..e4e3eae4 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -17,6 +17,6 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Codespell uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/install_from_wheel.yml b/.github/workflows/install_from_wheel.yml index 95595dba..05a3138d 100644 --- a/.github/workflows/install_from_wheel.yml +++ b/.github/workflows/install_from_wheel.yml @@ -17,7 +17,7 @@ jobs: max-parallel: 5 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v5 with: diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index fe1bd853..db45a518 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -17,7 +17,7 @@ jobs: max-parallel: 5 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v5 with: From c6b7de1a971cf746b11bd839681f5babcfa3aa90 Mon Sep 17 00:00:00 2001 From: Sunjae Shim <85246533+sjshim@users.noreply.github.com> Date: Tue, 2 Sep 2025 14:48:26 -0700 Subject: [PATCH 06/12] FIX xdrlib import compatibility for Python 3.12+ and Blender (#577) * deal with xdrlib import for blender interpreter * fix import statement for blender --- cortex/blender/__init__.py | 4 ++-- cortex/blender/blendlib.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cortex/blender/__init__.py b/cortex/blender/__init__.py index 2f21642c..e4f54020 100644 --- a/cortex/blender/__init__.py +++ b/cortex/blender/__init__.py @@ -2,7 +2,7 @@ import re import shlex import shutil -import mda_xdrlib as xdrlib +from mda_xdrlib import xdrlib import tempfile import subprocess as sp @@ -17,7 +17,7 @@ _base_imports = """import sys sys.path.insert(0, '{path}') -import xdrlib +from mda_xdrlib import xdrlib import blendlib import bpy.ops from bpy import context as C diff --git a/cortex/blender/blendlib.py b/cortex/blender/blendlib.py index 3ecb96ce..c81b1b86 100644 --- a/cortex/blender/blendlib.py +++ b/cortex/blender/blendlib.py @@ -2,7 +2,7 @@ It provides utility functions for adding meshes and saving them to communicate with the rest of pycortex """ import struct -import mda_xdrlib as xdrlib +from mda_xdrlib import xdrlib import tempfile import bpy.ops From 3390c1da1cd69b5bd21da799b0b187eb7824cb78 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:07:24 -0700 Subject: [PATCH 07/12] Bump actions/setup-python from 5 to 6 (#578) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 5 to 6. 
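Note on the xdrlib change in the previous patch: xdrlib was deprecated and then
removed from the Python standard library (it is gone as of Python 3.13), and
mda_xdrlib is a third-party copy of the old module with the same interface. A
hedged sketch of the pack/unpack round trip the Blender bridge relies on (the
packed string is an arbitrary example, not a path used by pycortex):

    from mda_xdrlib import xdrlib  # same API as the removed stdlib module

    p = xdrlib.Packer()
    p.pack_string(b"flatten.patch")  # arguments are shuttled to Blender as XDR strings
    buf = p.get_buffer()

    u = xdrlib.Unpacker(buf)
    print(u.unpack_string().decode("utf-8"))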
- [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/setup-python dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build_docs.yml | 2 +- .github/workflows/install_from_wheel.yml | 2 +- .github/workflows/publish_to_pypi.yml | 2 +- .github/workflows/run_tests.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build_docs.yml b/.github/workflows/build_docs.yml index a7d9e94f..39b71107 100644 --- a/.github/workflows/build_docs.yml +++ b/.github/workflows/build_docs.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v5 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: 3.9 diff --git a/.github/workflows/install_from_wheel.yml b/.github/workflows/install_from_wheel.yml index 05a3138d..45e03e52 100644 --- a/.github/workflows/install_from_wheel.yml +++ b/.github/workflows/install_from_wheel.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml index 470beac5..edeb68b5 100644 --- a/.github/workflows/publish_to_pypi.yml +++ b/.github/workflows/publish_to_pypi.yml @@ -7,7 +7,7 @@ jobs: steps: - uses: actions/checkout@master - name: Set up Python 3.9 - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: 3.9 - name: Install pypa/build diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index db45a518..49d15b98 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -19,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} From 19a900edacfd5f6e9346b3aafe81509d066887e9 Mon Sep 17 00:00:00 2001 From: dmitry Date: Sun, 12 Oct 2025 21:18:45 -0700 Subject: [PATCH 08/12] Support blender flattening; docstring improvements; notebook; --- .gitignore | 3 + cortex/blender/__init__.py | 170 ++++++++++++++--- cortex/blender/blendlib.py | 197 ++++++++++++++++--- cortex/freesurfer.py | 23 ++- cortex/segment.py | 167 ++++++++++------ examples/quickstart/fmri_flattening.ipynb | 220 ++++++++++++++++++++++ 6 files changed, 660 insertions(+), 120 deletions(-) create mode 100644 examples/quickstart/fmri_flattening.ipynb diff --git a/.gitignore b/.gitignore index 57505380..a181cb67 100644 --- a/.gitignore +++ b/.gitignore @@ -60,3 +60,6 @@ docs/_build docs/auto_examples docs/generated docs/colormaps.rst + +# Python virtual environment +.venv diff --git a/cortex/blender/__init__.py b/cortex/blender/__init__.py index e4f54020..23e6f375 100644 --- a/cortex/blender/__init__.py +++ b/cortex/blender/__init__.py @@ -5,6 +5,7 @@ from mda_xdrlib import xdrlib import tempfile import subprocess as sp +import site import numpy as np @@ -16,30 +17,76 @@ default_blender = options.config.get('dependency_paths', 'blender') _base_imports = """import sys -sys.path.insert(0, '{path}') +for site_dir in {site_dirs}: + print("Adding python site directory to sys.path:", 
site_dir) + sys.path.insert(0, site_dir) + from mda_xdrlib import xdrlib import blendlib import bpy.ops from bpy import context as C from bpy import data as D -""".format(path=os.path.split(os.path.abspath(__file__))[0]) +""".format(site_dirs=[ + os.path.split(os.path.abspath(__file__))[0], + *site.getsitepackages(), +]) + + +def _wrap_code(code, filename): + """ + Wrap code for running in blender + + Parameters + ---------- + code : str + code to run in blender + filename : str + file path for blender file (must end in ".blend") + """ + wrapped_code = _base_imports + if not os.path.exists(filename): + wrapped_code += "blendlib.clear_all()\n" + wrapped_code += code + wrapped_code += "\nbpy.ops.wm.save_mainfile(filepath='{fname}')".format(fname=filename) + return wrapped_code -def _call_blender(filename, code, blender_path=default_blender): - """Call blender, while running the given code. If the filename doesn't exist, save a new file in that location. + +def _call_blender(filename, code=None, background=True, blender_path=default_blender): + """ + Call blender, while running the given code. If the filename doesn't exist, save a new file in that location. New files will be initially cleared by deleting all objects. + + Parameters + ---------- + filename : str + file path for blender file (must end in ".blend") + code : str, optional + code to run in blender. If None, blender will be opened without running any code. + background : bool, optional + If True, blender will be opened in background mode. + blender_path : str, optional + Path to blender executable. If None, defaults to the path specified in pycortexconfig file. """ with tempfile.NamedTemporaryFile() as tf: print("In new named temp file: %s"%tf.name) - startcode = _base_imports - endcode = "\nbpy.ops.wm.save_mainfile(filepath='{fname}')".format(fname=filename) - cmd = "{blender_path} -b {fname} -P {tfname}".format(blender_path=blender_path, fname=filename, tfname=tf.name) - if not os.path.exists(filename): - startcode += "blendlib.clear_all()\n" - cmd = "{blender_path} -b -P {tfname}".format(blender_path=blender_path, tfname=tf.name) - else: + + # Backup + if os.path.exists(filename): _legacy_blender_backup(filename, blender_path=blender_path) - tf.write((startcode+code+endcode).encode()) - tf.flush() + + # Construct command + cmd = blender_path + if background: + cmd += " -b" + if os.path.exists(filename): + cmd += " " + filename + if code is not None: + wrapped_code = _wrap_code(code, filename) + tf.write(wrapped_code.encode()) + tf.flush() + cmd += " -P {tfname}".format(tfname=tf.name) + + print(f"Calling blender:\n {cmd}") sp.check_call([w.encode() for w in shlex.split(cmd)],) @@ -97,7 +144,7 @@ def _legacy_blender_backup(fname, blender_path=default_blender): shutil.copy(fname, fname_bkup) -def add_cutdata(fname, braindata, name="retinotopy", projection="nearest", mesh="hemi", blender_path=default_blender): +def add_cutdata(fname, braindata, name="retinotopy", projection="nearest", mesh="hemi", blender_path=None): """Add data as vertex colors to blender mesh Useful to add localizer data for help in placing flatmap cuts @@ -117,6 +164,8 @@ def add_cutdata(fname, braindata, name="retinotopy", projection="nearest", mesh= mesh : string ... 
""" + blender_path = blender_path or default_blender + if isinstance(braindata, dataset.Dataset): for view_name, data in braindata.views.items(): add_cutdata(fname, data, name=view_name, projection=projection, mesh=mesh) @@ -162,10 +211,12 @@ def add_cutdata(fname, braindata, name="retinotopy", projection="nearest", mesh= return -def gii_cut(fname, subject, hemi, blender_path=default_blender): +def gii_cut(fname, subject, hemi, blender_path=None): ''' Add gifti surface to blender ''' + blender_path = blender_path or default_blender + from ..database import db hemis = dict(lh='left', rh='right') @@ -194,16 +245,24 @@ def gii_cut(fname, subject, hemi, blender_path=default_blender): _call_blender(fname, code, blender_path=blender_path) -def fs_cut(fname, subject, hemi, freesurfer_subject_dir=None, blender_path=default_blender): - """Cut freesurfer surface using blender interface +def fs_cut_init(fname, subject, hemi, freesurfer_subject_dir=None, blender_path=None): + """Initialize a blender object from a freesurfer volume. Parameters ---------- fname : str file path for new .blend file (must end in ".blend") - - if `freesurfer_subject_dir` is None, it defaults to SUBJECTS_DIR environment variable + subject : str + subject name + hemi : str + hemisphere name (lh or rh) + freesurfer_subject_dir : str + path to freesurfer subject directory. If None, it defaults to SUBJECTS_DIR environment variable + blender_path : str + path to blender executable. If None, it defaults to the path specified in pycortexconfig file. """ + blender_path = blender_path or default_blender + wpts, polys, curv = freesurfer.get_surf(subject, hemi, 'smoothwm', freesurfer_subject_dir=freesurfer_subject_dir) ipts, _, _ = freesurfer.get_surf(subject, hemi, 'inflated', freesurfer_subject_dir=freesurfer_subject_dir) rcurv = np.clip(((-curv + .6) / 1.2), 0, 1) @@ -225,8 +284,29 @@ def fs_cut(fname, subject, hemi, freesurfer_subject_dir=None, blender_path=defau """.format(tfname=tf.name) _call_blender(fname, code, blender_path=blender_path) + +def fs_cut_open(fname, blender_path=None): + """Open a blender file in blender for the manual cut + + Parameters + ---------- + fname : str + file path for blender file (must end in ".blend") + blender_path : str + path to blender executable. If None, it defaults to the path specified in pycortexconfig file. + """ + blender_path = blender_path or default_blender + + _call_blender(fname, background=False, blender_path=blender_path) + + def write_patch(bname, pname, mesh="hemi", blender_path=default_blender): - """Write out the mesh 'mesh' in the blender file 'bname' into patch file 'pname' + """Deprecated: please use write_volume_patch instead""" + return write_volume_patch(bname, pname, "hemi", mesh, blender_path) + + +def write_volume_patch(bname, pname, hemi, mesh="hemi", blender_path=None): + """Write volume patch in freesurfer format. This is a necessary step for flattening the surface in freesurfer Parameters @@ -235,11 +315,18 @@ def write_patch(bname, pname, mesh="hemi", blender_path=default_blender): blender file name that contains the mesh pname : str name of patch file to be saved + hemi : str + hemisphere name (lh or rh) mesh : str name of mesh in blender file + blender_path : str, optional + path to blender executable. If None, it defaults to the path specified in pycortexconfig file. 
""" + blender_path = blender_path or default_blender + p = xdrlib.Packer() p.pack_string(pname.encode()) + p.pack_string(hemi.encode()) p.pack_string(mesh.encode()) with tempfile.NamedTemporaryFile() as tf: tf.write(p.get_buffer()) @@ -247,8 +334,49 @@ def write_patch(bname, pname, mesh="hemi", blender_path=default_blender): code = """with open('{tfname}', 'rb') as fp: u = xdrlib.Unpacker(fp.read()) pname = u.unpack_string().decode('utf-8') + hemi = u.unpack_string().decode('utf-8') mesh = u.unpack_string().decode('utf-8') - blendlib.save_patch(pname, mesh) + blendlib.write_volume_patch(pname, hemi, mesh) """.format(tfname=tf.name) _call_blender(bname, code, blender_path=blender_path) + return True + +def write_flat_patch(bname, pname, hemi, mesh="hemi", method="MINIMUM_STRETCH", blender_path=None): + """Write flat patch in freesurfer format. + This is a necessary step for flattening the surface in freesurfer + Parameters + ---------- + bname : str + blender file name that contains the mesh + pname : str + name of patch file to be saved + hemi : str + hemisphere name (lh or rh) + mesh : str + name of mesh in blender file + method : str + method to use for UV unwrap. One of 'CONFORMAL', 'ANGLE_BASED', 'MINIMUM_STRETCH'. + blender_path : str, optional + path to blender executable. If None, it defaults to the path specified in pycortexconfig file. + """ + blender_path = blender_path or default_blender + + p = xdrlib.Packer() + p.pack_string(pname.encode()) + p.pack_string(hemi.encode()) + p.pack_string(mesh.encode()) + p.pack_string(method.encode()) + with tempfile.NamedTemporaryFile() as tf: + tf.write(p.get_buffer()) + tf.flush() + code = """with open('{tfname}', 'rb') as fp: + u = xdrlib.Unpacker(fp.read()) + pname = u.unpack_string().decode('utf-8') + hemi = u.unpack_string().decode('utf-8') + mesh = u.unpack_string().decode('utf-8') + method = u.unpack_string().decode('utf-8') + blendlib.write_flat_patch(pname, hemi, mesh, method) + """.format(tfname=tf.name) + _call_blender(bname, code, blender_path=blender_path) + return True \ No newline at end of file diff --git a/cortex/blender/blendlib.py b/cortex/blender/blendlib.py index c81b1b86..22ad05b1 100644 --- a/cortex/blender/blendlib.py +++ b/cortex/blender/blendlib.py @@ -1,9 +1,14 @@ -"""This module is intended to be imported directly by blender. -It provides utility functions for adding meshes and saving them to communicate with the rest of pycortex +""" +This module is intended to be imported directly by blender. +It provides utility functions for adding meshes and saving them to communicate with the rest of pycortex. + +Read more about Blender Python API here: https://docs.blender.org/api/current/index.html. """ import struct from mda_xdrlib import xdrlib import tempfile +import time +import math import bpy.ops from bpy import context as C @@ -131,7 +136,7 @@ def add_shapekey(shape, name=None): key.data[i].co = shape[i] return key -def write_patch(filename, pts, edges=None): +def _write_patch(filename, pts, edges=None): """Writes a patch file that is readable by freesurfer. Parameters @@ -154,8 +159,56 @@ def write_patch(filename, pts, edges=None): fp.write(struct.pack('>i3f', -i-1, *pt)) else: fp.write(struct.pack('>i3f', i+1, *pt)) + print("Wrote freesurfer patch to %s"%filename) + +def _circularize_uv_coords(pts, u_min, u_max, v_min, v_max): + """Transform UV coordinates into a circular shape while preserving relative positions. 
+ + Parameters + ---------- + pts : dict + Dictionary mapping vertex indices to (u, v, z) coordinates + u_min, u_max, v_min, v_max : float + Original bounds of the UV coordinates + + Returns + ------- + dict + Dictionary mapping vertex indices to new (u, v, z) coordinates + """ + # Convert to normalized coordinates in [-1, 1] range + u_center = (u_max + u_min) / 2 + v_center = (v_max + v_min) / 2 + u_scale = (u_max - u_min) / 2 + v_scale = (v_max - v_min) / 2 + + new_pts = {} + for idx, (u, v, z) in pts.items(): + # Normalize coordinates + u_norm = (u - u_center) / u_scale + v_norm = (v - v_center) / v_scale + + # Convert to polar coordinates + r = math.sqrt(u_norm**2 + v_norm**2) + theta = math.atan2(v_norm, u_norm) + + # Normalize radius to create perfect circle + # Use square root to preserve area/density + r = math.sqrt(r) + + # Convert back to Cartesian coordinates + u_new = r * math.cos(theta) + v_new = r * math.sin(theta) + + # Scale back to original range + u_new = u_new * u_scale + u_center + v_new = v_new * v_scale + v_center + + new_pts[idx] = (u_new, v_new, z) + + return new_pts -def _get_pts_edges(mesh): +def _get_geometry(mesh, hemi, flatten, method=None): """Function called within blender to get non-cut vertices & edges Operates on a mesh object within an open instance of blender. @@ -164,10 +217,26 @@ def _get_pts_edges(mesh): ---------- mesh : str name of mesh to cut + hemi : str + hemisphere name (lh or rh) + flatten : bool + if True, returns flattened coordinates using UV unwrap + method : str + method to use for UV unwrap. One of 'CONFORMAL', 'ANGLE_BASED', 'MINIMUM_STRETCH'. + + Returns + ------- + verts : set + set of vertex indices + pts : list + list of (vertex_index, flattened_coordinates) tuples + edges : set + set of edge vertex indices """ if isinstance(mesh, str): mesh = D.meshes[mesh] + # Collect edge vertex indices bpy.ops.object.mode_set(mode='OBJECT') bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_all(action='DESELECT') @@ -175,30 +244,33 @@ def _get_pts_edges(mesh): bpy.ops.mesh.select_non_manifold() bpy.ops.object.mode_set(mode='OBJECT') - mwall_edge = set() + edge_vertex_idxs = set() # Medial wall in standard case for edge in mesh.edges: if edge.select: - mwall_edge.add(edge.vertices[0]) - mwall_edge.add(edge.vertices[1]) + edge_vertex_idxs.add(edge.vertices[0]) + edge_vertex_idxs.add(edge.vertices[1]) + # Collect seam vertex indices & select seams bpy.ops.object.mode_set(mode='EDIT') C.tool_settings.mesh_select_mode = True, False, False bpy.ops.mesh.select_all(action='DESELECT') bpy.ops.object.mode_set(mode='OBJECT') - seam = set() + seam_vertex_idxs = set() for edge in mesh.edges: if edge.use_seam: - seam.add(edge.vertices[0]) - seam.add(edge.vertices[1]) + seam_vertex_idxs.add(edge.vertices[0]) + seam_vertex_idxs.add(edge.vertices[1]) edge.select = True + # Expand seam selection & collect expanded vertex indices bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_more() bpy.ops.object.mode_set(mode='OBJECT') - smore = set() + expanded_seam_vertex_idxs = set() for i, vert in enumerate(mesh.vertices): if vert.select: - smore.add(i) + expanded_seam_vertex_idxs.add(i) + # Leave cuts (+ area around them) selected. 
# Uncomment the next lines to revert to previous behavior # (deselecting everything) @@ -206,26 +278,93 @@ def _get_pts_edges(mesh): # bpy.ops.mesh.select_all(action='DESELECT') # bpy.ops.object.mode_set(mode='OBJECT') - fverts = set() - if hasattr(mesh, "polygons"): - faces = mesh.polygons - else: - faces = mesh.faces - for face in faces: - fverts.add(face.vertices[0]) - fverts.add(face.vertices[1]) - fverts.add(face.vertices[2]) - - print("exported %d faces"%len(fverts)) - edges = mwall_edge | (smore - seam) - verts = fverts - seam + face_vertices = set() + for face in getattr(mesh, "polygons", getattr(mesh, "faces", None)): + face_vertices.add(face.vertices[0]) + face_vertices.add(face.vertices[1]) + face_vertices.add(face.vertices[2]) + + verts = face_vertices - seam_vertex_idxs pts = [(v, D.shape_keys['Key'].key_blocks['inflated'].data[v].co) for v in verts] + edges = edge_vertex_idxs | (expanded_seam_vertex_idxs - seam_vertex_idxs) + + if flatten: + # Scales + u_coords, v_coords = [u for _, (u, _, _) in pts], [v for _, (_, v, _) in pts] + u_min, u_max, v_min, v_max = min(u_coords), max(u_coords), min(v_coords), max(v_coords) + print("u_min: %f, u_max: %f, v_min: %f, v_max: %f"%(u_min, u_max, v_min, v_max)) + + if not mesh.uv_layers: + mesh.uv_layers.new(name="FlattenUV") + + print("UV unwrapping mesh with method %s (may take a few minutes)..."%method) + start = time.time() + bpy.ops.object.mode_set(mode='EDIT') + bpy.ops.mesh.select_all(action='SELECT') + bpy.ops.uv.unwrap(method=method, margin=0.001) + bpy.ops.object.mode_set(mode='OBJECT') + end = time.time() + print("UV unwrapping mesh took %.1f seconds" % (end - start)) + + print("Collecting coordinates for %d verts..."%len(verts)) + start = time.time() + pts = {} + for loop in mesh.loops: + if loop.vertex_index in verts: + coords2d = mesh.uv_layers.active.data[loop.index].uv + u_scaled = coords2d[0] * (u_max - u_min) + u_min + v_scaled = coords2d[1] * (v_max - v_min) + v_min + + if hemi == "rh": + # Rotate 180 degrees clockwise + u_center = (u_max + u_min) / 2 + v_center = (v_max + v_min) / 2 + u_rotated = 2 * u_center - u_scaled + v_rotated = 2 * v_center - v_scaled + pts[loop.vertex_index] = (u_rotated, v_rotated, 0.0) + else: + pts[loop.vertex_index] = (u_scaled, v_scaled, 0.0) + + # Circularize the UV coordinates + print("Circularizing UV coordinates...") + pts = _circularize_uv_coords(pts, u_min, u_max, v_min, v_max) + + pts = sorted(pts.items(), key=lambda x: x[0]) + end = time.time() + print("Collecting coordinates took %.1f seconds" % (end - start)) + + print("Collected geometry. 
verts: %d, pts: %d, edges: %d"%(len(verts), len(pts), len(edges))) return verts, pts, edges -def save_patch(fname, mesh='hemi'): - """Saves patch to file that can be read by freesurfer""" - verts, pts, edges = _get_pts_edges(mesh) - write_patch(fname, pts, edges) + +def save_patch(fname, mesh="hemi"): + """Deprecated: please use write_volume_patch instead""" + return write_volume_patch(fname, "lh", mesh) + + +def write_volume_patch(fname, hemi, mesh="hemi"): + """Write mesh patch in freesurfer format""" + _, pts, edges = _get_geometry(mesh, hemi, flatten=False) + _write_patch(fname, pts, edges) + + +def write_flat_patch(fname, hemi, mesh="hemi", method="MINIMUM_STRETCH"): + """Write flat patch in freesurfer format + + Parameters + ---------- + fname : str + Output filename + hemi : str + hemisphere name (lh or rh) + mesh : str + Name of the mesh to flatten + method : str + UV unwrapping method to use + """ + _, pts, edges = _get_geometry(mesh, hemi, flatten=True, method=method) + _write_patch(fname, pts, edges) + def read_xdr(filename): with open(filename, "rb") as fp: diff --git a/cortex/freesurfer.py b/cortex/freesurfer.py index 4d21d901..f384b0ff 100644 --- a/cortex/freesurfer.py +++ b/cortex/freesurfer.py @@ -283,6 +283,9 @@ def import_flat(fs_subject, patch, hemis=['lh', 'rh'], cx_subject=None, List of hemispheres to import. Defaults to both hemispheres. cx_subject : str Pycortex subject name + flat_type : str + Type of flatmap to import. Defaults to 'freesurfer'. + Can be 'freesurfer', 'slim', or 'blender'. freesurfer_subject_dir : str directory for freesurfer subjects. None defaults to environment variable $SUBJECTS_DIR @@ -308,15 +311,19 @@ def import_flat(fs_subject, patch, hemis=['lh', 'rh'], cx_subject=None, from . import formats for hemi in hemis: - if flat_type == 'freesurfer': - pts, polys, _ = get_surf(fs_subject, hemi, "patch", patch+".flat", freesurfer_subject_dir=freesurfer_subject_dir) - # Reorder axes: X, Y, Z instead of Y, X, Z - flat = pts[:, [1, 0, 2]] - # Flip Y axis upside down - flat[:, 1] = -flat[:, 1] + if flat_type in ['freesurfer', 'blender']: + surf_path = (patch + ".flat") if (flat_type == 'freesurfer') else (patch + ".flat.blender") + pts, polys, _ = get_surf(fs_subject, hemi, "patch", surf_path, freesurfer_subject_dir=freesurfer_subject_dir) + + if flat_type == 'freesurfer': + # Reorder axes: X, Y, Z instead of Y, X, Z + flat = pts[:, [1, 0, 2]] + # Flip Y axis upside down + flat[:, 1] = -flat[:, 1] + else: + flat = pts elif flat_type == 'slim': - flat_file = get_paths(fs_subject, hemi, type='slim', - freesurfer_subject_dir=freesurfer_subject_dir) + flat_file = get_paths(fs_subject, hemi, type='slim', freesurfer_subject_dir=freesurfer_subject_dir) flat_file = flat_file.format(name=patch + ".flat") flat, polys = formats.read_obj(flat_file) diff --git a/cortex/segment.py b/cortex/segment.py index da3ea925..51534707 100644 --- a/cortex/segment.py +++ b/cortex/segment.py @@ -6,8 +6,8 @@ import warnings import numpy as np import subprocess as sp -from builtins import input import multiprocessing as mp +from builtins import input from . import formats from . 
import blender @@ -125,7 +125,8 @@ def edit_segmentation(subject, def cut_surface(cx_subject, hemi, name='flatten', fs_subject=None, data=None, freesurfer_subject_dir=None, flatten_with='freesurfer', - do_import_subject=True, blender_cmd=None, **kwargs): + method=None, do_import_subject=True, blender_path=None, + recache=True, auto_overwrite=False, **kwargs): """Initializes an interface to cut the segmented surface for flatmapping. This function creates or opens a blend file in your filestore which allows surfaces to be cut along hand-defined seams. Blender will automatically @@ -162,80 +163,122 @@ def cut_surface(cx_subject, hemi, name='flatten', fs_subject=None, data=None, (https://github.com/MichaelRabinovich/Scalable-Locally-Injective-Mappings) to your computer and set the slim dependency path in your pycortex config file to point to /ReweightedARAP + method : str + method to use for UV unwrap. When using Blender, it must be present and + can be one of 'CONFORMAL', 'ANGLE_BASED', 'MINIMUM_STRETCH'. do_import_subject : bool set option to automatically import flatmaps when both are completed (if set to false, you must import later with `cortex.freesurfer.import_flat()`) + blender_path : str + Path to blender executable. If None, defaults to path specified in pycortexconfig file. + recache : boolean + Whether or not to recache intermediate files. Takes longer to plot this way, potentially + resolves some errors. Useful if you've made changes to the alignment + auto_overwrite : bool + Whether to overwrite existing flatmaps. If True, the flatmap will be + overwritten without asking for confirmation. """ + + blender_path = blender_path or "/Applications/Blender.app/Contents/MacOS/Blender" if fs_subject is None: fs_subject = cx_subject - opts = "[hemi=%s,name=%s]"%(hemi, name) - fname = db.get_paths(cx_subject)['anats'].format(type='cutsurf', opts=opts, ext='blend') + # Double-check that fiducial and inflated vertex counts match - # (these may not match if a subject is initially imported from freesurfer to pycortex, + # (these may not match if a subject is initially imported from freesurfer to pycortex, # and then edited further for a better segmentation and not re-imported) - ipt, ipoly, inrm = freesurfer.get_surf(fs_subject, hemi, 'inflated') - fpt, fpoly, fnrm = freesurfer.get_surf(fs_subject, hemi, 'fiducial') + ipt, ipoly, inrm = freesurfer.get_surf(fs_subject, hemi, "inflated") + fpt, fpoly, fnrm = freesurfer.get_surf(fs_subject, hemi, "fiducial") if ipt.shape[0] != fpt.shape[0]: - raise ValueError("Please re-import subject - fiducial and inflated vertex counts don't match!") + raise ValueError( + "Please re-import subject - fiducial and inflated vertex counts don't match!" 
+ ) else: - print('Vert check ok!') - if not os.path.exists(fname): - blender.fs_cut(fname, fs_subject, hemi, freesurfer_subject_dir) + print("Vert check ok!") + + # Create blender file with cuts + opts = "[hemi=%s,name=%s]" % (hemi, name) + fname = db.get_paths(cx_subject)["anats"].format( + type="cutsurf", opts=opts, ext="blend" + ) + if not os.path.exists(fname) or recache: + if os.path.exists(fname): + os.remove(fname) + print("Initializing blender file %s..."%fname) + blender.fs_cut_init(fname, fs_subject, hemi, freesurfer_subject_dir, blender_path=blender_path) + # Add localizer data to facilitate cutting if data is not None: - if isinstance(data, list): - for d in data: - blender.add_cutdata(fname, d, name=d.description) - else: - blender.add_cutdata(fname, data, name=data.description) - if blender_cmd is None: - blender_cmd = options.config.get('dependency_paths', 'blender') - # May be redundant after blender.fs_cut above... - if os.path.exists(fname): - blender._legacy_blender_backup(fname, blender_path=blender_cmd) - sp.call([blender_cmd, fname]) - patchpath = freesurfer.get_paths(fs_subject, hemi, - freesurfer_subject_dir=freesurfer_subject_dir) - patchpath = patchpath.format(name=name) - blender.write_patch(fname, patchpath, blender_path=blender_cmd) - if flatten_with == 'freesurfer': - done = freesurfer.flatten(fs_subject, hemi, patch=name, - freesurfer_subject_dir=freesurfer_subject_dir, - **kwargs) - if not done: - # If flattening is aborted, skip the rest of this function - # (Do not attempt to import completed flatmaps) - return - if do_import_subject: - # Check to see if both hemispheres have been flattened - other = freesurfer.get_paths(fs_subject, "lh" if hemi == "rh" else "rh", - freesurfer_subject_dir=freesurfer_subject_dir) - other = other.format(name=name+".flat") - # If so, go ahead and import subject - if os.path.exists(other): - freesurfer.import_flat(fs_subject, name, cx_subject=cx_subject, - flat_type='freesurfer', - freesurfer_subject_dir=freesurfer_subject_dir) + data = data if isinstance(data, list) else [data] + for d in data: + blender.add_cutdata(fname, d, name=d.description, blender_path=blender_path) + + # Open blender for user to manually do the cuts + print("Opening blender file %s..."%fname) + blender.fs_cut_open(fname, blender_path=blender_path) + + # Generate 3D base freesurfer patch to flatten + base_patch_path = freesurfer.get_paths( + fs_subject, hemi, freesurfer_subject_dir=freesurfer_subject_dir + ).format(name=name) + if not os.path.exists(base_patch_path) or recache: + if os.path.exists(base_patch_path): + os.remove(base_patch_path) + print("Writing base patch to %s..."%base_patch_path) + blender.write_volume_patch(fname, base_patch_path, hemi, blender_path=blender_path) + """ + pts_v, polys_v, _ = freesurfer.get_surf(fs_subject, hemi, "patch", name, freesurfer_subject_dir=freesurfer_subject_dir) + pts_f, polys_f, _ = freesurfer.get_surf(fs_subject, hemi, "patch", name+".flat", freesurfer_subject_dir=freesurfer_subject_dir) + """ + + # Flatten + if flatten_with == 'blender': + assert method is not None, "method must be provided when using blender" + + flat_patch_path = freesurfer.get_paths( + fs_subject, hemi, freesurfer_subject_dir=freesurfer_subject_dir + ).format(name=name + ".flat.blender") + if os.path.exists(flat_patch_path): + os.remove(flat_patch_path) + + print("Generating flat patch via blender method %s to %s..."%(method, flat_patch_path)) + done = blender.write_flat_patch(fname, flat_patch_path, hemi, method=method, 
blender_path=blender_path) + path_type, flat_type = "patch", "blender" + elif flatten_with == 'freesurfer': + print("Generating flat patch via freesurfer...") + done = freesurfer.flatten( + fs_subject, hemi, patch=name, freesurfer_subject_dir=freesurfer_subject_dir, **kwargs + ) + path_type, flat_type = "patch", "freesurfer" elif flatten_with == 'SLIM': - done = flatten_slim(fs_subject, hemi, patch=name, - freesurfer_subject_dir=freesurfer_subject_dir, - **kwargs) - if not done: - # If flattening is aborted, skip the rest of this function - # (Do not attempt to import completed flatmaps) - return - if do_import_subject: - other = freesurfer.get_paths(fs_subject, "lh" if hemi == "rh" else "rh", - type='slim', - freesurfer_subject_dir=freesurfer_subject_dir) - other = other.format(name=name) - # If so, go ahead and import subject - if os.path.exists(other): - freesurfer.import_flat(fs_subject, name, cx_subject=cx_subject, - flat_type='slim', - freesurfer_subject_dir=freesurfer_subject_dir) + print("Generating flat patch via SLIM...") + done = flatten_slim( + fs_subject, hemi, patch=name, freesurfer_subject_dir=freesurfer_subject_dir, **kwargs + ) + path_type, flat_type = "slip", "slim" + else: + raise ValueError(f"Invalid flatten_with: {flatten_with}") - return + # If flattening is aborted, skip the rest of this function + # (Do not attempt to import completed flatmaps) + if not done: + return + + # Import from freesurfer to pycortex DB + if do_import_subject: + hemi = "lh" if hemi == "rh" else "rh" + other = freesurfer.get_paths( + fs_subject, hemi, path_type, freesurfer_subject_dir=freesurfer_subject_dir + ).format(name=name) + + if os.path.exists(other): # Only when both hemi flats are present + freesurfer.import_flat( + fs_subject, + name, + cx_subject=cx_subject, + flat_type=flat_type, + auto_overwrite=auto_overwrite, + freesurfer_subject_dir=freesurfer_subject_dir, + ) def flatten_slim(subject, hemi, patch, n_iterations=20, freesurfer_subject_dir=None, diff --git a/examples/quickstart/fmri_flattening.ipynb b/examples/quickstart/fmri_flattening.ipynb new file mode 100644 index 00000000..b036190c --- /dev/null +++ b/examples/quickstart/fmri_flattening.ipynb @@ -0,0 +1,220 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Functional MRI pial flattening in 20 minutes\n", + "\n", + "This notebook demonstrates how to use PyCortex to:\n", + "1. Cut and flatten brain surfaces in under 20 minutes;\n", + "2. Preview flattened surface in under 10 seconds;\n", + "3. Visualize BOLD fMRI data on the flattened surfaces" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prerequisites\n", + "\n", + "You will need to run auto" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 1. Reconcile subject's anatomy with Freesurfer\n", + "\n", + "Make sure you have fully reconciled the subject's. Detailed guidance is out of scope for this notebook, but roughly you should've run these lines:\n", + "\n", + "```\n", + "recon-all -autorecon1 -s sub-01\n", + "recon-all -autorecon2 -s sub-01\n", + "recon-all -autorecon3 -s sub-01\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### 2. 
Install Python dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "import cortex\n", + "import nibabel as nib\n", + "import numpy as np\n", + "from matplotlib import pyplot as plt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 1: Align Function MRI to Anatomical\n", + "\n", + "`cortex.align.automatic` perform automatic alignment using Freesurfer's boundary-based registration. It is fully automated. This should take ~1 minute." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "bold_nii_gz_path = \"path to bold.nii.gz\"\n", + "cortex.align.automatic(\n", + " subject=\"sub-01\",\n", + " xfmname=\"full\",\n", + " reference=bold_nii_gz_path,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 2: Surface Cutting and Flattening\n", + "\n", + "`cortex.segment.cut_surface` performs surface cutting. It is semi-automatic, meaning it requires a bit of manual work. It will start Blender for us to manually specify the cuts. Make sure Blender is installed on your system.\n", + "\n", + "#### A. Left hemisphere\n", + "\n", + "Let's start with the left hemisphere.\n", + "\n", + "Please follow [this guide](https://www.youtube.com/watch?v=D4tylQ_mMuM) on cuts. This will take ~15 minutes." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cortex.segment.cut_surface(\n", + " \"sub-01\",\n", + " \"lh\",\n", + " name=\"exp\",\n", + " flatten_with=\"blender\",\n", + " method=\"CONFORMAL\",\n", + " recache=True,\n", + " do_import_subject=False,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### B. Right hemisphere\n", + "\n", + "Then repeat the same for the right hemisphere.\n", + "\n", + "This will take another ~5 minutes.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Right hemisphere\n", + "cortex.segment.cut_surface(\n", + " \"sub-01\",\n", + " \"rh\",\n", + " name=\"exp\",\n", + " flatten_with=\"blender\",\n", + " method=\"CONFORMAL\",\n", + " recache=True,\n", + " auto_overwrite=True,\n", + " do_import_subject=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Step 3: Visualize fMRI\n", + "\n", + "Now we'll load the BOLD fMRI data and prepare it for visualization. We'll compute the temporal mean of the BOLD signal to create a static visualization.\n", + "\n", + "Then we can use `cortex.quickshow` to visualize the BOLD data on the flattened surfaces using pycortex.\n", + "\n", + "This will take 30 seconds." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Load functional data\n",
+    "bold_path = \"path to bold.nii.gz\"\n",
+    "img = nib.load(bold_path)\n",
+    "data = img.get_fdata()\n",
+    "\n",
+    "# Compute temporal mean\n",
+    "mean_vol = np.mean(data, axis=3)\n",
+    "\n",
+    "# Transpose to match expected orientation (96,96,76) → (76,96,96)\n",
+    "mean_vol = np.transpose(mean_vol, (2, 0, 1))\n",
+    "\n",
+    "# Select cuboid\n",
+    "cuboid = np.array([[0, 20, 0], [50, 70, 70]])  # (2, 3) as a pair of (min, max) voxels\n",
+    "print(mean_vol.shape)\n",
+    "cuboid_slice = tuple(slice(*span) for span in cuboid.transpose())\n",
+    "mean_vol_filtered = np.zeros_like(mean_vol)\n",
+    "mean_vol_filtered[cuboid_slice] = mean_vol[cuboid_slice]\n",
+    "mean_vol = mean_vol_filtered\n",
+    "\n",
+    "\n",
+    "# Create Volume object for visualization\n",
+    "vol = cortex.Volume(\n",
+    "    mean_vol,\n",
+    "    subject=\"sub-01\",\n",
+    "    depth=1,\n",
+    "    height=512,\n",
+    "    xfmname=\"full\",\n",
+    "    vmin=np.min(mean_vol),\n",
+    "    vmax=np.max(mean_vol),\n",
+    ")\n",
+    "\n",
+    "# Display the visualization\n",
+    "cortex.quickshow(vol, with_colorbar=True, recache=True)\n",
+    "plt.show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.17"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}

From eda5767294cc48b25206afc261ddbcbdf10d5002 Mon Sep 17 00:00:00 2001
From: dmitry
Date: Sun, 12 Oct 2025 21:18:45 -0700
Subject: [PATCH 09/12] Docs update

---
 cortex/segment.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cortex/segment.py b/cortex/segment.py
index 51534707..488ad59a 100644
--- a/cortex/segment.py
+++ b/cortex/segment.py
@@ -155,7 +155,7 @@ def cut_surface(cx_subject, hemi, name='flatten', fs_subject=None, data=None,
         Name of Freesurfer subject directory. None defaults to SUBJECTS_DIR
         environment variable
     flatten_with : str
-        'freesurfer' or 'SLIM' - 'freesurfer' (default) uses freesurfer's
+        One of 'freesurfer', 'SLIM', or 'blender' - 'freesurfer' (default) uses freesurfer's
         `mris_flatten` function to flatten the cut surface. 'SLIM' uses the
         SLIM algorithm, which takes much less time but tends to leave more
         distortions in the flatmap. SLIM is an optional dependency, and

From ace52fc2e93bcf061b85427bd924bd6373bad6b7 Mon Sep 17 00:00:00 2001
From: dmitry
Date: Sun, 12 Oct 2025 21:18:45 -0700
Subject: [PATCH 10/12] Documentation on the cuts

---
 docs/segmentation_guide.rst | 104 ++++++++++++++++++++++++++++++++----
 1 file changed, 94 insertions(+), 10 deletions(-)

diff --git a/docs/segmentation_guide.rst b/docs/segmentation_guide.rst
index 4f32d5af..eaf746b3 100644
--- a/docs/segmentation_guide.rst
+++ b/docs/segmentation_guide.rst
@@ -17,7 +17,7 @@ The brain model is exported to a 3D modeling program called Blender, where you w
 
 **3. Labeling ROIs**
 
-Here you will project functional data (semantic betas or localizer data, as well as retinotopic) onto the flatmaps, allowing you to label Regions of Interest on the brain - areas responsive to faces, scenes, or whatever else we’re analyzing.
+Here you will project functional data (semantic betas or localizer data, as well as retinotopic) onto the flatmaps, allowing you to label Regions of Interest on the brain - areas responsive to faces, scenes, or whatever else we're analyzing.
 
 In this guide, we will go over the first two steps.
 
@@ -46,7 +46,7 @@ To open freesurfer:
 
 For example: ``/auto/myfolder/freesurfer/SetUpFreeSurfer.sh``
 
-Create a “subjects” directory. If a "subjects" directory doesn't exist, make one in the FreeSurfer directory. Freesurfer is finicky about directories, so this step is crucial.
+Create a "subjects" directory. If a "subjects" directory doesn't exist, make one in the FreeSurfer directory. Freesurfer is finicky about directories, so this step is crucial.
 
@@ -69,7 +69,7 @@ In this case, you just give Freesurfer the name of the very first dicom file in
 
 For example: ``/auto/myfolder/anatomy/Subject/Subject_t1_nii -s Subject``
 
-The ‘-s Subject’ portion creates a folder, in this case a folder titled "Subject". The folder should be named for the subject.
+The '-s Subject' portion creates a folder, in this case a folder titled "Subject". The folder should be named for the subject.
 
@@ -102,14 +102,14 @@ At this stage, you just want to make sure that autorecon1 ran successfully and t
 of non-brain anatomy were not left behind. If big chunks of eye or skull were left behind, it is
 good to manually delete them yourself. If autorecon1 ran successfully, you can probably skip manual
 editing even if some anatomy was left behind since the next step, autorecon2, is quite
-accurate at determining brain surfaces even if non-brain anatomy was left behind. However, it’s good to double check that everything worked out.
+accurate at determining brain surfaces even if non-brain anatomy was left behind. However, it's good to double check that everything worked out.
 
 To pull up the newly stripped brains and make manual edits, type in your terminal:
 
     ``ipython``
     ``import cortex``
-    ``cortex.segment.fix_wm(‘Subject’)``
+    ``cortex.segment.fix_wm('Subject')``
 
 This should cause three windows to pop up: a mayavi viewer with the 3D brain, one of the brain in 2D, and one of a tool bar. At this point, you want to edit individual voxels. This mostly consists of getting rid of remaining skull and eyes. To do this, click the edit voxels tool on the toolbox bar or press A on your keyboard as a shortcut. After this, to delete voxels, simply right click the areas you wish to delete. If you erase something by accident and want to undo it, press CTRL + Z (this only works for the last thing you erased so be careful).
 
@@ -151,12 +151,12 @@ Tools > Configure volume brush
 Set Clone Source to Aux Volume
 This lets you paint from the aux volume to the mask.
-Set Mode back to New Value if you’re done.
+Set Mode back to New Value if you're done.
 
 To change brush size:
 Tools > Configure brush info > Change Radius
 
-To change the size of the "paintbrush”, in the tool bar, go to: tools > configure brush info and
+To change the size of the "paintbrush", in the tool bar, go to: tools > configure brush info and
 change the radius. A shortcut to do the same thing is to press the numbers on the keypad of your
 keyboard (where 1 is 1x1, 4 is 4x4, etc). Generally you should just work with a 1-pixel radius, though.
 
@@ -206,9 +206,9 @@ not labeled as white matter when they should be.
 The command to make these edits
 
     ``import cortex``
-    ``cortex.segment.fix_wm(“subject”)``
+    ``cortex.segment.fix_wm("subject")``
 
-We’ll look through the results of autorecon2, examining the white matter curve and masks, and then the pial (gray matter) curve. This can be a lengthy process; because it’s an entirely nonverbal task, I recommend listening to podcasts as you go.
+We'll look through the results of autorecon2, examining the white matter curve and masks, and then the pial (gray matter) curve. This can be a lengthy process; because it's an entirely nonverbal task, I recommend listening to podcasts as you go.
 
 |
 
@@ -221,10 +221,94 @@ it shouldn't (such as gray matter and/or leftover pieces of eye or skull) as wel
 green/yellow surfaces. Make sure to hit "A" to switch to edit mode.
 
-Autorecon on the white matter surface should take about 2 hours. These manual edits are an iterative process; when it’s done, go back and look over the 3D surface, and make any changes that seem necessary. New spikes can appear in unexpected places, so three or four iterations may be needed, probably more if you are just starting to learn how to do it.
+Autorecon on the white matter surface should take about 2 hours. These manual edits are an iterative process; when it's done, go back and look over the 3D surface, and make any changes that seem necessary. New spikes can appear in unexpected places, so three or four iterations may be needed, probably more if you are just starting to learn how to do it.
+
+
+Making cuts
+##################################
+
+After completing the segmentation phase, the next step is to make cuts in the brain surface to prepare it for flattening. This process involves creating cuts along the brain's sulci to transform the 3D surface into a 2D flatmap with minimal distortion.
+
+PyCortex provides three different methods for cutting and flattening brain surfaces:
+
+**1. Freesurfer (Recommended)**
+The traditional and most reliable method that uses Freesurfer's `mris_flatten` command. This method produces high-quality flatmaps with minimal distortion but takes approximately 2 hours per hemisphere.
+
+**2. SLIM**
+An experimental method using the SLIM algorithm that is very fast but tends to leave more distortions in the flatmap. It requires additional installation of the SLIM dependency.
+
+**3. Blender**
+A newer method that uses Blender's UV unwrapping capabilities for faster flattening (typically 5-15 minutes per hemisphere). While faster, it may introduce more distortion compared to Freesurfer.
+
+The complete process begins with manual cutting in Blender, where you'll make cuts to prepare the surface for flattening. Once the cuts are complete, the cut surface is automatically flattened using your chosen method. Finally, the resulting flatmap is imported into PyCortex for visualization and analysis.
+
+You may follow the steps below, or use the accompanying `Python notebook `_.
+
+Step 1: Manual Cutting in Blender
+***************************************************
+
+Start the cutting process by calling `cortex.segment.cut_surface()`. This function will create a Blender file with your brain surface, open Blender automatically, and allow you to make manual cuts for the left hemisphere.
+
+.. code-block:: python
+
+    import cortex
+
+    cortex.segment.cut_surface(
+        "sub-01",                    # Your subject ID
+        "lh",                        # Left hemisphere
+        name="flatten",              # Name for this flattening attempt
+        flatten_with="freesurfer",   # Or "SLIM" or "blender"
+        recache=True,                # Force recache of the subject
+        do_import_subject=False,     # Don't import until both hemispheres are done
+    )
+
+To make the cuts, please watch the `cutting tutorial video `_.
+
+Step 2: Repeat for Right Hemisphere
+***************************************************
+
+After completing the left hemisphere, repeat the process for the right hemisphere.
+
+.. code-block:: python
+
+    cortex.segment.cut_surface(
+        "sub-01",                    # Your subject ID
+        "rh",                        # Right hemisphere
+        name="flatten",              # Name for this flattening attempt
+        flatten_with="freesurfer",   # Or "SLIM" or "blender"
+        recache=True,                # Force recache of the subject
+        auto_overwrite=True,         # Overwrite PyCortex record
+        do_import_subject=True,      # Import both hemispheres when done
+    )
+
+After completing both hemispheres, your flatmap will be automatically imported into PyCortex and ready for visualization and analysis.
+
+
+Step 3: Verify the cuts
+***************************************************
+
+To verify that your cuts and flattening worked correctly, you can visualize the results using PyCortex's visualization tools. Here's a verification script:
+
+.. code-block:: python
+
+    import cortex
+    from matplotlib import pyplot as plt
+
+    # Random test volume shaped to match the "full" transform
+    vol = cortex.Volume.random(subject="sub-01", xfmname="full", vmin=0, vmax=1)
+
+    # Display the visualization on the flatmap
+    cortex.quickshow(vol, with_colorbar=True, recache=True)
+    plt.show()
+
+Alternatively, you may follow one of the examples from the gallery.
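+
+If you prefer a non-graphical check, you can also confirm that both flattened hemispheres were
+written to the pycortex database by loading them directly. This is a minimal sketch, assuming the
+subject is named ``sub-01`` as in the examples above:
+
+.. code-block:: python
+
+    import cortex
+
+    # Load the flattened surfaces for both hemispheres from the pycortex database;
+    # each hemisphere is returned as a (points, polygons) pair.
+    flat_left, flat_right = cortex.db.get_surf("sub-01", "flat", hemisphere="both")
+    print("Left hemisphere vertices:", flat_left[0].shape)
+    print("Right hemisphere vertices:", flat_right[0].shape)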
From f5fb8f8e8d790331abf4114c70a234e94b271645 Mon Sep 17 00:00:00 2001
From: dmitry
Date: Sun, 12 Oct 2025 21:18:45 -0700
Subject: [PATCH 11/12] Aligning coordinates conversion with RAS

---
 examples/quickstart/fmri_flattening.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/quickstart/fmri_flattening.ipynb b/examples/quickstart/fmri_flattening.ipynb
index b036190c..84be61a9 100644
--- a/examples/quickstart/fmri_flattening.ipynb
+++ b/examples/quickstart/fmri_flattening.ipynb
@@ -168,7 +168,7 @@
     "mean_vol = np.mean(data, axis=3)\n",
     "\n",
     "# Transpose to match expected orientation (96,96,76) → (76,96,96)\n",
-    "mean_vol = np.transpose(mean_vol, (2, 0, 1))\n",
+    "mean_vol = np.transpose(mean_vol, (2, 1, 0))\n",
     "\n",
     "# Select cuboid\n",
     "cuboid = np.array([[0, 20, 0], [50, 70, 70]])  # (2, 3) as a pair of (min, max) voxels\n",

From 46ee94a7d0c81d2fe84978096ab73e917d427797 Mon Sep 17 00:00:00 2001
From: dmitry
Date: Sun, 12 Oct 2025 21:18:45 -0700
Subject: [PATCH 12/12] Pulling default blender path from config

---
 cortex/segment.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/cortex/segment.py b/cortex/segment.py
index 488ad59a..0981d313 100644
--- a/cortex/segment.py
+++ b/cortex/segment.py
@@ -18,7 +18,8 @@ from .freesurfer import autorecon as run_freesurfer_recon
 from .freesurfer import import_subj as import_freesurfer_subject
 
-slim_path = options.config.get('dependency_paths', 'slim')
+default_blender_path = options.config.get('dependency_paths', 'blender')
+default_slim_path = options.config.get('dependency_paths', 'slim')
 
 
 def init_subject(subject, filenames, do_import_subject=False, **kwargs):
@@ -125,7 +126,7 @@ def edit_segmentation(subject,
 def cut_surface(cx_subject, hemi, name='flatten', fs_subject=None, data=None,
                 freesurfer_subject_dir=None, flatten_with='freesurfer',
-                method=None, do_import_subject=True, blender_path=None,
+                method=None, do_import_subject=True, blender_path=default_blender_path,
                 recache=True, auto_overwrite=False, **kwargs):
     """Initializes an interface to cut the segmented surface for flatmapping.
     This function creates or opens a blend file in your filestore which allows
@@ -179,7 +180,7 @@ def cut_surface(cx_subject, hemi, name='flatten', fs_subject=None, data=None,
         overwritten without asking for confirmation.
     """
-    blender_path = blender_path or "/Applications/Blender.app/Contents/MacOS/Blender"
+
     if fs_subject is None:
         fs_subject = cx_subject
@@ -282,7 +282,7 @@ def cut_surface(cx_subject, hemi, name='flatten', fs_subject=None, data=None,
 
 def flatten_slim(subject, hemi, patch, n_iterations=20, freesurfer_subject_dir=None,
-                 slim_path=slim_path, do_flatten=None):
+                 slim_path=default_slim_path, do_flatten=None):
     """Flatten brain w/ slim object flattening
 
     Parameters