From 6ba5ae883b1b9cc6d9ddf6cdcbfb4c1e6e2ba13c Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Wed, 15 Oct 2025 18:13:34 +0200 Subject: [PATCH 01/12] plumed+metatomic --- .../builtin/packages/plumed/package.py | 75 +++++++++++++++++-- .../builtin/packages/py_torch/package.py | 6 ++ 2 files changed, 76 insertions(+), 5 deletions(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index 97932965113..122445a3fcb 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -4,6 +4,8 @@ import collections import os +from pathlib import Path + from spack_repo.builtin.build_systems.autotools import AutotoolsPackage @@ -35,6 +37,7 @@ class Plumed(AutotoolsPackage): version("master", branch="master") + version("2.10.0", sha256="ca6410d47e91b4e0f953e1a8933f15b05c4681167611ab3b096ab121155f6879") version("2.9.2", sha256="301fbc958374f81d9b8c7a1eac73095f6dded52cce73ce33d64bdbebf51ac63d") version("2.9.1", sha256="e24563ad1eb657611918e0c978d9c5212340f128b4f1aa5efbd439a0b2e91b58") version("2.9.0", sha256="612d2387416b5f82dd8545709921440370e144fd46cef633654cf0ee43bac5f8") @@ -112,6 +115,7 @@ class Plumed(AutotoolsPackage): conditional("opes", when="@2.7:"), conditional("pamm", when="optional_modules=adjmat"), "piv", + conditional("pytorch", when="@2.9:"), conditional("s2cm", when="@2.8:"), conditional("sasa", when="@2.8:"), "secondarystructure", @@ -138,6 +142,18 @@ class Plumed(AutotoolsPackage): values=("none", "cpu", "cuda", "opencl"), description="Activates FireArray support", ) + variant( + "pytorch", + default=False, + description="Activates PyTorch support", + when="@2.9:", + ) + variant( + "metatomic", + default=False, + description="Activates PyTorch support", + when="@2.10:", + ) depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated @@ -160,12 +176,16 @@ class Plumed(AutotoolsPackage): depends_on("m4", 
type="build") depends_on("py-cython", type="build") + depends_on("py-torch", when="+pytorch") + conflicts("+metatomic ~pytorch", msg="metatomic support requires PyTorch") + depends_on("libmetatomic-torch", when="+metatomic") + # https://github.com/plumed/plumed2/issues/1256 conflicts("^py-cython@3.1:", when="@:2.9.3") force_autoreconf = True - parallel = False + parallel = True def apply_patch(self, other): # The name of MD engines differ slightly from the ones used in Spack @@ -239,6 +259,10 @@ def configure_args(self): # the issue saying we have no LD_RO executable. configure_opts = ["--disable-ld-r"] + configure_opts.append( + "--disable-doc" + ) + # If using MPI then ensure the correct compiler wrapper is used. if "+mpi" in spec: configure_opts.extend(["--enable-mpi", "CXX={0}".format(spec["mpi"].mpicxx)]) @@ -247,20 +271,50 @@ def configure_args(self): # additional argument is required to allow it to build. if spec.satisfies("^[virtuals=mpi] intel-oneapi-mpi"): configure_opts.extend(["STATIC_LIBS=-mt_mpi"]) + + enable_libmetatomic = self.spec.satisfies("+metatomic") + enable_libtorch = self.spec.satisfies("+pytorch") or self.spec.satisfies("+metatomic") + extra_ldflags = [] extra_libs = [] + extra_cppflags = [] # Set flags to help find gsl if "+gsl" in spec: gsl_libs = spec["gsl"].libs blas_libs = spec["blas"].libs - extra_libs.append((gsl_libs + blas_libs).ld_flags) + extra_ldflags.append((gsl_libs + blas_libs).search_flags) + extra_libs.append((gsl_libs + blas_libs).link_flags) + extra_cppflags.extend([spec["gsl"].headers.include_flags, spec["blas"].headers.include_flags]) # Set flags to help with ArrayFire if "arrayfire=none" not in spec: libaf = "arrayfire:{0}".format(spec.variants["arrayfire"].value) - extra_libs.append(spec[libaf].libs.search_flags) + extra_ldflags.append(spec[libaf].libs.search_flags) + extra_libs.append(spec[libaf].libs.link_flags) + extra_cppflags.append(spec[libaf].headers.include_flags) + # Set flags to help with PyTorch + if 
enable_libtorch: + pytorch_path = Path(spec["py-torch"].package.cmake_prefix_paths[0]).parent.parent + extra_ldflags.append(spec["py-torch"].libs.search_flags) + extra_libs.append(spec["py-torch"].libs.link_flags) + extra_cppflags.extend([ + f"-I{pytorch_path / 'include'}", + f"-I{pytorch_path / 'include' / 'torch' / 'csrc' / 'api' / 'include'}", + ]) + print(extra_cppflags[-2:]) + if enable_libmetatomic: + for l in ["libmetatensor", "libmetatensor-torch", "libmetatomic-torch"]: + extra_ldflags.append(spec[l].libs.search_flags) + extra_libs.append(spec[l].libs.link_flags) + extra_cppflags.append(spec[l].headers.include_flags) + + if extra_ldflags: + configure_opts.append("LDFLAGS={0}".format(" ".join(extra_ldflags))) if extra_libs: - configure_opts.append("LDFLAGS={0}".format(" ".join(extra_libs))) + configure_opts.append("LIBS={0}".format(" ".join(extra_libs))) + + if extra_cppflags: + configure_opts.append("CPPFLAGS={0}".format(" ".join(extra_cppflags))) # Additional arguments configure_opts.extend( @@ -270,11 +324,15 @@ def configure_args(self): "--enable-af_cpu={0}".format("yes" if "arrayfire=cpu" in spec else "no"), "--enable-af_cuda={0}".format("yes" if "arrayfire=cuda" in spec else "no"), "--enable-af_ocl={0}".format("yes" if "arrayfire=ocl" in spec else "no"), + "--enable-libtorch={0}".format("yes" if enable_libtorch else "no"), + "--enable-libmetatomic={0}".format("yes" if enable_libmetatomic else "no"), ] ) # Construct list of optional modules - optional_modules = self.spec.variants["optional_modules"].value + optional_modules = spec.variants["optional_modules"].value + + # Predefined set of modules if "all" in optional_modules: selected_modules = "all" @@ -282,9 +340,16 @@ def configure_args(self): selected_modules = "reset" # Custom set of modules else: + # Ensure modules from variants + if spec.satisfies("+pytorch") or spec.satisfies("+metatomic"): + optional_modules += ("pytorch",) + if spec.satisfies("+metatomic"): + optional_modules += 
("metatomic",) + selected_modules = "none" for mod in optional_modules: selected_modules += ":+{0}".format(mod) + configure_opts.append("--enable-modules={0}".format(selected_modules)) return configure_opts diff --git a/repos/spack_repo/builtin/packages/py_torch/package.py b/repos/spack_repo/builtin/packages/py_torch/package.py index 59bb0b3678e..f9258560f87 100644 --- a/repos/spack_repo/builtin/packages/py_torch/package.py +++ b/repos/spack_repo/builtin/packages/py_torch/package.py @@ -803,3 +803,9 @@ def install_test(self): def cmake_prefix_paths(self): cmake_prefix_paths = [join_path(python_platlib, "torch", "share", "cmake")] return cmake_prefix_paths + + @property + def libs(self): + return find_libraries("libtorch*", root=python_platlib, recursive=True, shared=True) + find_libraries( + "libc10*", root=python_platlib, recursive=True, shared=True + ) From f92af4607af12e3a2c0f0fb5b7f3a674da448b36 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Wed, 15 Oct 2025 18:15:25 +0200 Subject: [PATCH 02/12] pytorch as variant only --- repos/spack_repo/builtin/packages/plumed/package.py | 1 - 1 file changed, 1 deletion(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index 122445a3fcb..08240ebde74 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -115,7 +115,6 @@ class Plumed(AutotoolsPackage): conditional("opes", when="@2.7:"), conditional("pamm", when="optional_modules=adjmat"), "piv", - conditional("pytorch", when="@2.9:"), conditional("s2cm", when="@2.8:"), conditional("sasa", when="@2.8:"), "secondarystructure", From cc6ee572f9a44160bfba15d9400e27b77a929e17 Mon Sep 17 00:00:00 2001 From: RMeli Date: Wed, 15 Oct 2025 16:21:49 +0000 Subject: [PATCH 03/12] [@spackbot] updating style on behalf of RMeli --- .../builtin/packages/plumed/package.py | 36 +++++++------------ .../builtin/packages/py_torch/package.py | 6 ++-- 2 files 
changed, 16 insertions(+), 26 deletions(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index 08240ebde74..863204110bb 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -6,7 +6,6 @@ import os from pathlib import Path - from spack_repo.builtin.build_systems.autotools import AutotoolsPackage from spack.package import * @@ -141,18 +140,8 @@ class Plumed(AutotoolsPackage): values=("none", "cpu", "cuda", "opencl"), description="Activates FireArray support", ) - variant( - "pytorch", - default=False, - description="Activates PyTorch support", - when="@2.9:", - ) - variant( - "metatomic", - default=False, - description="Activates PyTorch support", - when="@2.10:", - ) + variant("pytorch", default=False, description="Activates PyTorch support", when="@2.9:") + variant("metatomic", default=False, description="Activates PyTorch support", when="@2.10:") depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated @@ -258,9 +247,7 @@ def configure_args(self): # the issue saying we have no LD_RO executable. configure_opts = ["--disable-ld-r"] - configure_opts.append( - "--disable-doc" - ) + configure_opts.append("--disable-doc") # If using MPI then ensure the correct compiler wrapper is used. if "+mpi" in spec: @@ -270,7 +257,7 @@ def configure_args(self): # additional argument is required to allow it to build. 
if spec.satisfies("^[virtuals=mpi] intel-oneapi-mpi"): configure_opts.extend(["STATIC_LIBS=-mt_mpi"]) - + enable_libmetatomic = self.spec.satisfies("+metatomic") enable_libtorch = self.spec.satisfies("+pytorch") or self.spec.satisfies("+metatomic") @@ -283,7 +270,9 @@ def configure_args(self): blas_libs = spec["blas"].libs extra_ldflags.append((gsl_libs + blas_libs).search_flags) extra_libs.append((gsl_libs + blas_libs).link_flags) - extra_cppflags.extend([spec["gsl"].headers.include_flags, spec["blas"].headers.include_flags]) + extra_cppflags.extend( + [spec["gsl"].headers.include_flags, spec["blas"].headers.include_flags] + ) # Set flags to help with ArrayFire if "arrayfire=none" not in spec: libaf = "arrayfire:{0}".format(spec.variants["arrayfire"].value) @@ -295,10 +284,12 @@ def configure_args(self): pytorch_path = Path(spec["py-torch"].package.cmake_prefix_paths[0]).parent.parent extra_ldflags.append(spec["py-torch"].libs.search_flags) extra_libs.append(spec["py-torch"].libs.link_flags) - extra_cppflags.extend([ - f"-I{pytorch_path / 'include'}", - f"-I{pytorch_path / 'include' / 'torch' / 'csrc' / 'api' / 'include'}", - ]) + extra_cppflags.extend( + [ + f"-I{pytorch_path / 'include'}", + f"-I{pytorch_path / 'include' / 'torch' / 'csrc' / 'api' / 'include'}", + ] + ) print(extra_cppflags[-2:]) if enable_libmetatomic: for l in ["libmetatensor", "libmetatensor-torch", "libmetatomic-torch"]: @@ -331,7 +322,6 @@ def configure_args(self): # Construct list of optional modules optional_modules = spec.variants["optional_modules"].value - # Predefined set of modules if "all" in optional_modules: selected_modules = "all" diff --git a/repos/spack_repo/builtin/packages/py_torch/package.py b/repos/spack_repo/builtin/packages/py_torch/package.py index f9258560f87..f0379386338 100644 --- a/repos/spack_repo/builtin/packages/py_torch/package.py +++ b/repos/spack_repo/builtin/packages/py_torch/package.py @@ -806,6 +806,6 @@ def cmake_prefix_paths(self): @property def 
libs(self): - return find_libraries("libtorch*", root=python_platlib, recursive=True, shared=True) + find_libraries( - "libc10*", root=python_platlib, recursive=True, shared=True - ) + return find_libraries( + "libtorch*", root=python_platlib, recursive=True, shared=True + ) + find_libraries("libc10*", root=python_platlib, recursive=True, shared=True) From a2ef05b1ff6b545447d0323c87a4ed3f5d48dbb8 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Wed, 15 Oct 2025 18:25:38 +0200 Subject: [PATCH 04/12] update desc --- repos/spack_repo/builtin/packages/plumed/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index 863204110bb..700e26e666c 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -141,7 +141,7 @@ class Plumed(AutotoolsPackage): description="Activates FireArray support", ) variant("pytorch", default=False, description="Activates PyTorch support", when="@2.9:") - variant("metatomic", default=False, description="Activates PyTorch support", when="@2.10:") + variant("metatomic", default=False, description="Activates metatomic support", when="@2.10:") depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated From 40018a7a574837747589c68be11273580e34a6a3 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Tue, 21 Oct 2025 09:57:28 +0200 Subject: [PATCH 05/12] fix ci --- repos/spack_repo/builtin/packages/plumed/package.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index 700e26e666c..963dbc8115b 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -292,10 +292,10 @@ def configure_args(self): ) print(extra_cppflags[-2:]) if 
enable_libmetatomic: - for l in ["libmetatensor", "libmetatensor-torch", "libmetatomic-torch"]: - extra_ldflags.append(spec[l].libs.search_flags) - extra_libs.append(spec[l].libs.link_flags) - extra_cppflags.append(spec[l].headers.include_flags) + for libname in ["libmetatensor", "libmetatensor-torch", "libmetatomic-torch"]: + extra_ldflags.append(spec[libname].libs.search_flags) + extra_libs.append(spec[libname].libs.link_flags) + extra_cppflags.append(spec[libnamd].headers.include_flags) if extra_ldflags: configure_opts.append("LDFLAGS={0}".format(" ".join(extra_ldflags))) From 030dbc8e6dec509819b9f65da2368b3f08e5acd3 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Tue, 21 Oct 2025 10:09:30 +0200 Subject: [PATCH 06/12] update --- repos/spack_repo/builtin/packages/plumed/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index 963dbc8115b..ade35e53f4d 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -284,6 +284,8 @@ def configure_args(self): pytorch_path = Path(spec["py-torch"].package.cmake_prefix_paths[0]).parent.parent extra_ldflags.append(spec["py-torch"].libs.search_flags) extra_libs.append(spec["py-torch"].libs.link_flags) + # Add include paths manually + # Spack HeaderList.cpp_flags does not support include paths within include paths extra_cppflags.extend( [ f"-I{pytorch_path / 'include'}", From 8e93c55fc6ba64d4e342832fc9175f8b2eae46da Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Tue, 21 Oct 2025 15:07:15 +0200 Subject: [PATCH 07/12] Apply suggestions from code review Co-authored-by: Guillaume Fraux --- repos/spack_repo/builtin/packages/plumed/package.py | 1 - 1 file changed, 1 deletion(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index ade35e53f4d..b6ab1200637 100644 --- 
a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -292,7 +292,6 @@ def configure_args(self): f"-I{pytorch_path / 'include' / 'torch' / 'csrc' / 'api' / 'include'}", ] ) - print(extra_cppflags[-2:]) if enable_libmetatomic: for libname in ["libmetatensor", "libmetatensor-torch", "libmetatomic-torch"]: extra_ldflags.append(spec[libname].libs.search_flags) From 91ab998c27048176db65df5f39ef95cdbbe9279c Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Mon, 10 Nov 2025 10:24:37 +0100 Subject: [PATCH 08/12] update gromacs --- .../spack_repo/builtin/packages/gromacs/package.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/repos/spack_repo/builtin/packages/gromacs/package.py b/repos/spack_repo/builtin/packages/gromacs/package.py index 2881e1e2be7..96f64073aec 100644 --- a/repos/spack_repo/builtin/packages/gromacs/package.py +++ b/repos/spack_repo/builtin/packages/gromacs/package.py @@ -221,6 +221,7 @@ class Gromacs(CMakePackage, CudaPackage): depends_on("mpi", when="+mpi") + # Plumed 2.10.0 needs Gromacs 2025.0, 2024.3, 2023.5, 2022.5 # Plumed 2.9.0 needs Gromacs 2023, 2022.5, 2021.7, 2020.7 # Plumed 2.8.3 needs Gromacs 2022.5, 2021.7, 2020.7, 2019.6 # Plumed 2.8.2 needs Gromacs 2022.5, 2021.7, 2020.7, 2019.6 @@ -258,15 +259,19 @@ class Gromacs(CMakePackage, CudaPackage): # see https://github.com/spack/spack/releases/tag/v0.20.0 plumed_patches = { - "=2023": "2.9.1", - "2022.5": "2.8.2:2.9.1", + "2025.0": "2.10.0", + "2024.3": "2.9.3:2.10.0", + "2024.2": "2.9.2", + "2023.5": "2.9.2:2.10.0", + "=2023": "2.9.0:2.9.1", + "2022.5": "2.8.2:2.10.0", "2022.3": "2.8.1", - "2021.7": "2.8.2:2.9.1", + "2021.7": "2.8.2:2.9.4", "2021.6": "2.8.1", "2021.5": "2.7.5:2.7.6", "2021.4": "2.7.3:2.8.0", "=2021": "2.7.1:2.7.2", - "2020.7": "2.8.1:2.9.1", + "2020.7": "2.8.1:2.9.4", "2020.6": "2.7.2:2.8.0", "2020.5": "2.7.1", "2020.4": "2.6.2:2.7.0", From 866cf5e2c0eb66ce5cbbbade95949d2c59af6496 Mon 
Sep 17 00:00:00 2001 From: Rocco Meli Date: Tue, 11 Nov 2025 10:07:18 +0100 Subject: [PATCH 09/12] Update repos/spack_repo/builtin/packages/plumed/package.py --- repos/spack_repo/builtin/packages/plumed/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index b6ab1200637..b6d05e04ac8 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -259,7 +259,7 @@ def configure_args(self): configure_opts.extend(["STATIC_LIBS=-mt_mpi"]) enable_libmetatomic = self.spec.satisfies("+metatomic") - enable_libtorch = self.spec.satisfies("+pytorch") or self.spec.satisfies("+metatomic") + enable_libtorch = self.spec.satisfies("+pytorch") extra_ldflags = [] extra_libs = [] From 9da9232e1b001bc40a19eb2f46de95ec426e6b38 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Mon, 17 Nov 2025 14:38:24 +0100 Subject: [PATCH 10/12] fix typo --- repos/spack_repo/builtin/packages/plumed/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index b6d05e04ac8..ce37ecd2f99 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -296,7 +296,7 @@ def configure_args(self): for libname in ["libmetatensor", "libmetatensor-torch", "libmetatomic-torch"]: extra_ldflags.append(spec[libname].libs.search_flags) extra_libs.append(spec[libname].libs.link_flags) - extra_cppflags.append(spec[libnamd].headers.include_flags) + extra_cppflags.append(spec[libname].headers.include_flags) if extra_ldflags: configure_opts.append("LDFLAGS={0}".format(" ".join(extra_ldflags))) From 3a0949c7655fbb917dbc3dc4d028f04960d77b52 Mon Sep 17 00:00:00 2001 From: Rocco Meli Date: Mon, 24 Nov 2025 16:41:20 +0100 Subject: [PATCH 11/12] link to 
python --- repos/spack_repo/builtin/packages/plumed/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index ce37ecd2f99..d457dada8ec 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -284,12 +284,15 @@ def configure_args(self): pytorch_path = Path(spec["py-torch"].package.cmake_prefix_paths[0]).parent.parent extra_ldflags.append(spec["py-torch"].libs.search_flags) extra_libs.append(spec["py-torch"].libs.link_flags) + extra_ldflags.append(spec["python"].libs.search_flags) + extra_libs.append(spec["python"].libs.link_flags) # Add include paths manually # Spack HeaderList.cpp_flags does not support include paths within include paths extra_cppflags.extend( [ f"-I{pytorch_path / 'include'}", f"-I{pytorch_path / 'include' / 'torch' / 'csrc' / 'api' / 'include'}", + spec["python"].headers.include_flags ] ) if enable_libmetatomic: @@ -343,3 +346,4 @@ def configure_args(self): configure_opts.append("--enable-modules={0}".format(selected_modules)) return configure_opts + From 62e47b901106fc28dfbcee48d60837216925c383 Mon Sep 17 00:00:00 2001 From: RMeli Date: Mon, 24 Nov 2025 15:50:37 +0000 Subject: [PATCH 12/12] [@spackbot] updating style on behalf of RMeli --- repos/spack_repo/builtin/packages/plumed/package.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/repos/spack_repo/builtin/packages/plumed/package.py b/repos/spack_repo/builtin/packages/plumed/package.py index d457dada8ec..e0ae70d23e2 100644 --- a/repos/spack_repo/builtin/packages/plumed/package.py +++ b/repos/spack_repo/builtin/packages/plumed/package.py @@ -292,7 +292,7 @@ def configure_args(self): [ f"-I{pytorch_path / 'include'}", f"-I{pytorch_path / 'include' / 'torch' / 'csrc' / 'api' / 'include'}", - spec["python"].headers.include_flags + spec["python"].headers.include_flags, ] ) if 
enable_libmetatomic: @@ -346,4 +346,3 @@ def configure_args(self): configure_opts.append("--enable-modules={0}".format(selected_modules)) return configure_opts -