
Commit f73aa68

feat: Support RDMA stats in exporter (#45)
* Update unit tests and e2e test fixtures
* Correct scenario name in Makefile
---------
Signed-off-by: Mahendra Paipuri <[email protected]>
1 parent 7b1f38d commit f73aa68

10 files changed, +270 -12 lines changed

Makefile

Lines changed: 1 addition & 1 deletion
@@ -141,7 +141,7 @@ ifeq ($(CGO_BUILD), 0)
 test-e2e-update: build pkg/collector/fixtures/sys/.unpacked pkg/collector/fixtures/proc/.unpacked
 	@echo ">> updating end-to-end tests outputs"
 	./scripts/e2e-test.sh -s exporter-cgroups-v1 -u || true
-	./scripts/e2e-test.sh -s exporter-cgroups-v2-nvidia-ipmitutil -u || true
+	./scripts/e2e-test.sh -s exporter-cgroups-v2-nvidia-ipmiutil -u || true
 	./scripts/e2e-test.sh -s exporter-cgroups-v2-amd-ipmitool -u || true
 	./scripts/e2e-test.sh -s exporter-cgroups-v2-nogpu -u || true
 	./scripts/e2e-test.sh -s exporter-cgroups-v2-procfs -u || true

pkg/collector/fixtures/output/e2e-test-cgroupsv1-output.txt

Lines changed: 12 additions & 0 deletions
@@ -93,6 +93,18 @@ ceems_slurm_job_memory_total_bytes{hostname="",manager="slurm",project="testacc3
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 4.0194048e+07
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 4.0194048e+07
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 4.0194048e+07
+# HELP ceems_slurm_job_rdma_hca_handles Current number of RDMA HCA handles
+# TYPE ceems_slurm_job_rdma_hca_handles gauge
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 289
+ceems_slurm_job_rdma_hca_handles{device="hfi1_1",hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 1479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_2",hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 2479
+# HELP ceems_slurm_job_rdma_hca_objects Current number of RDMA HCA objects
+# TYPE ceems_slurm_job_rdma_hca_objects gauge
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 289
+ceems_slurm_job_rdma_hca_objects{device="hfi1_1",hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 1479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_2",hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 2479
 # HELP ceems_slurm_jobs Total number of jobs
 # TYPE ceems_slurm_jobs gauge
 ceems_slurm_jobs{hostname="",manager="slurm"} 3
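
The new ceems_slurm_job_rdma_hca_handles and ceems_slurm_job_rdma_hca_objects series in these fixtures are plain gauges keyed by device plus the usual job labels. As a rough illustration only (this is not the exporter's actual collector code; the registry setup, port, and hard-coded sample value are assumptions), a gauge with that label set could be exposed with prometheus/client_golang like this:

// Minimal sketch: expose a per-job RDMA HCA handles gauge with the label set
// seen in the fixture output above. Sample value and port are illustrative.
package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	hcaHandles := prometheus.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "ceems_slurm_job_rdma_hca_handles",
			Help: "Current number of RDMA HCA handles",
		},
		[]string{"device", "hostname", "manager", "project", "user", "uuid"},
	)
	prometheus.MustRegister(hcaHandles)

	// Hypothetical sample matching one fixture line above.
	hcaHandles.WithLabelValues("hfi1_0", "", "slurm", "testacc", "testusr",
		"0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5").Set(479)

	http.Handle("/metrics", promhttp.Handler())
	http.ListenAndServe(":9010", nil)
}

In the real collector these values would come from each job's RDMA cgroup files rather than a hard-coded sample.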

pkg/collector/fixtures/output/e2e-test-cgroupsv2-all-metrics-output.txt

Lines changed: 12 additions & 0 deletions
@@ -118,6 +118,18 @@ ceems_slurm_job_memsw_total_bytes{hostname="",manager="slurm",project="testacc3"
 ceems_slurm_job_memsw_used_bytes{hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 0
 ceems_slurm_job_memsw_used_bytes{hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 0
 ceems_slurm_job_memsw_used_bytes{hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 0
+# HELP ceems_slurm_job_rdma_hca_handles Current number of RDMA HCA handles
+# TYPE ceems_slurm_job_rdma_hca_handles gauge
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_handles{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
+# HELP ceems_slurm_job_rdma_hca_objects Current number of RDMA HCA objects
+# TYPE ceems_slurm_job_rdma_hca_objects gauge
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_objects{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
 # HELP ceems_slurm_jobs Total number of jobs
 # TYPE ceems_slurm_jobs gauge
 ceems_slurm_jobs{hostname="",manager="slurm"} 3

pkg/collector/fixtures/output/e2e-test-cgroupsv2-amd-ipmitool-output.txt

Lines changed: 12 additions & 0 deletions
@@ -93,6 +93,18 @@ ceems_slurm_job_memory_total_bytes{hostname="",manager="slurm",project="testacc3
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 4.111491072e+09
+# HELP ceems_slurm_job_rdma_hca_handles Current number of RDMA HCA handles
+# TYPE ceems_slurm_job_rdma_hca_handles gauge
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_handles{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
+# HELP ceems_slurm_job_rdma_hca_objects Current number of RDMA HCA objects
+# TYPE ceems_slurm_job_rdma_hca_objects gauge
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_objects{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
 # HELP ceems_slurm_jobs Total number of jobs
 # TYPE ceems_slurm_jobs gauge
 ceems_slurm_jobs{hostname="",manager="slurm"} 3

pkg/collector/fixtures/output/e2e-test-cgroupsv2-nogpu-output.txt

Lines changed: 12 additions & 0 deletions
@@ -87,6 +87,18 @@ ceems_slurm_job_memory_total_bytes{hostname="",manager="slurm",project="testacc3
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 4.111491072e+09
+# HELP ceems_slurm_job_rdma_hca_handles Current number of RDMA HCA handles
+# TYPE ceems_slurm_job_rdma_hca_handles gauge
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_handles{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
+# HELP ceems_slurm_job_rdma_hca_objects Current number of RDMA HCA objects
+# TYPE ceems_slurm_job_rdma_hca_objects gauge
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_objects{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
 # HELP ceems_slurm_jobs Total number of jobs
 # TYPE ceems_slurm_jobs gauge
 ceems_slurm_jobs{hostname="",manager="slurm"} 3

pkg/collector/fixtures/output/e2e-test-cgroupsv2-nvidia-ipmiutil-output.txt

Lines changed: 12 additions & 0 deletions
@@ -93,6 +93,18 @@ ceems_slurm_job_memory_total_bytes{hostname="",manager="slurm",project="testacc3
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc",user="testusr",uuid="1009248"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="1009250"} 4.111491072e+09
+# HELP ceems_slurm_job_rdma_hca_handles Current number of RDMA HCA handles
+# TYPE ceems_slurm_job_rdma_hca_handles gauge
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="1009250"} 289
+ceems_slurm_job_rdma_hca_handles{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 1479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 2479
+# HELP ceems_slurm_job_rdma_hca_objects Current number of RDMA HCA objects
+# TYPE ceems_slurm_job_rdma_hca_objects gauge
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="1009250"} 289
+ceems_slurm_job_rdma_hca_objects{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 1479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="1009249"} 2479
 # HELP ceems_slurm_jobs Total number of jobs
 # TYPE ceems_slurm_jobs gauge
 ceems_slurm_jobs{hostname="",manager="slurm"} 3

pkg/collector/fixtures/output/e2e-test-cgroupsv2-procfs-output.txt

Lines changed: 12 additions & 0 deletions
@@ -93,6 +93,18 @@ ceems_slurm_job_memory_total_bytes{hostname="",manager="slurm",project="testacc3
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc",user="testusr",uuid="0f0ac288-dbd4-a9a3-df3a-ab14ef9d51d5"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 4.111491072e+09
 ceems_slurm_job_memory_used_bytes{hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 4.111491072e+09
+# HELP ceems_slurm_job_rdma_hca_handles Current number of RDMA HCA handles
+# TYPE ceems_slurm_job_rdma_hca_handles gauge
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_handles{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_handles{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
+# HELP ceems_slurm_job_rdma_hca_objects Current number of RDMA HCA objects
+# TYPE ceems_slurm_job_rdma_hca_objects gauge
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_0",hostname="",manager="slurm",project="testacc3",user="testusr2",uuid="77caf800-acd0-1fd2-7211-644e46814fc1"} 289
+ceems_slurm_job_rdma_hca_objects{device="hfi1_1",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 1479
+ceems_slurm_job_rdma_hca_objects{device="hfi1_2",hostname="",manager="slurm",project="testacc2",user="testusr2",uuid="018ce2fe-b3f9-632a-7507-0e01c2687de5"} 2479
 # HELP ceems_slurm_jobs Total number of jobs
 # TYPE ceems_slurm_jobs gauge
 ceems_slurm_jobs{hostname="",manager="slurm"} 3

pkg/collector/fixtures/sys.ttar

Lines changed: 119 additions & 0 deletions
@@ -2543,6 +2543,101 @@ Lines: 5
 9870
 Mode: 644
 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Directory: sys/fs/cgroup/rdma
+Mode: 775
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Directory: sys/fs/cgroup/rdma/slurm
+Mode: 775
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Directory: sys/fs/cgroup/rdma/slurm/uid_1000
+Mode: 775
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/cgroup.clone_children
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/cgroup.procs
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/cgroup.sane_behavior
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Directory: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248
+Mode: 775
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248/cgroup.clone_children
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248/cgroup.procs
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248/notify_on_release
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248/rdma.current
+Lines: 3
+hfi1_0 hca_handle=479 hca_object=340
+hfi1_1 hca_handle=1479 hca_object=1340
+hfi1_2 hca_handle=2479 hca_object=2340EOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248/rdma.max
+Lines: 3
+hfi1_0 hca_handle=max hca_object=max
+hfi1_1 hca_handle=max hca_object=max
+hfi1_2 hca_handle=max hca_object=maxEOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009248/tasks
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Directory: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249
+Mode: 775
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249/cgroup.clone_children
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249/cgroup.procs
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249/notify_on_release
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249/rdma.current
+Lines: 1
+hfi1_0 hca_handle=289 hca_object=1000EOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249/rdma.max
+Lines: 1
+hfi1_0 hca_handle=max hca_object=maxEOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/job_1009249/tasks
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/notify_on_release
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/release_agent
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/rdma/slurm/uid_1000/tasks
+Lines: 0
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 Directory: sys/fs/cgroup/system.slice
 Mode: 775
 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@@ -6303,6 +6398,20 @@ Lines: 1
 max
 Mode: 640
 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/system.slice/slurmstepd.scope/job_1009249/rdma.current
+Lines: 3
+hfi1_0 hca_handle=479 hca_object=340
+hfi1_1 hca_handle=1479 hca_object=1340
+hfi1_2 hca_handle=2479 hca_object=2340EOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/system.slice/slurmstepd.scope/job_1009249/rdma.max
+Lines: 3
+hfi1_0 hca_handle=max hca_object=max
+hfi1_1 hca_handle=max hca_object=max
+hfi1_2 hca_handle=max hca_object=maxEOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 Directory: sys/fs/cgroup/system.slice/slurmstepd.scope/job_1009249/step_3
 Mode: 775
 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@@ -9685,6 +9794,16 @@ Lines: 1
 max
 Mode: 640
 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/system.slice/slurmstepd.scope/job_1009250/rdma.current
+Lines: 1
+hfi1_0 hca_handle=289 hca_object=1000EOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+Path: sys/fs/cgroup/system.slice/slurmstepd.scope/job_1009250/rdma.max
+Lines: 1
+hfi1_0 hca_handle=max hca_object=maxEOF
+Mode: 664
+# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
 Directory: sys/fs/cgroup/system.slice/slurmstepd.scope/job_1009250/step_3
 Mode: 775
 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
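
The rdma.current fixtures added above follow the cgroup RDMA controller's interface format: one line per HCA device with space-separated hca_handle= and hca_object= pairs (rdma.max may report the literal value max instead of a number). As a minimal, hypothetical sketch of reading that format (parseRDMACurrent and RDMAStats are illustrative names, not the exporter's actual API):

// Hypothetical helper showing how an rdma.current file like the fixtures
// above could be parsed. Each line has the form:
//   "<device> hca_handle=<n> hca_object=<n>"
package rdma

import (
	"bufio"
	"os"
	"strconv"
	"strings"
)

// RDMAStats holds per-device handle/object counts read from rdma.current.
type RDMAStats struct {
	Device    string
	HCAHandle uint64
	HCAObject uint64
}

func parseRDMACurrent(path string) ([]RDMAStats, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var stats []RDMAStats
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		fields := strings.Fields(scanner.Text())
		if len(fields) < 3 {
			continue // skip malformed lines
		}
		s := RDMAStats{Device: fields[0]}
		for _, kv := range fields[1:] {
			parts := strings.SplitN(kv, "=", 2)
			if len(parts) != 2 {
				continue
			}
			// rdma.max may contain "max"; rdma.current holds plain integers.
			v, err := strconv.ParseUint(parts[1], 10, 64)
			if err != nil {
				continue
			}
			switch parts[0] {
			case "hca_handle":
				s.HCAHandle = v
			case "hca_object":
				s.HCAObject = v
			}
		}
		stats = append(stats, s)
	}
	return stats, scanner.Err()
}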
