Skip to content

Commit 65b247c

Browse files
szabgabrik86189
authored and committed
Avoid using the same file twice in SUMMARY.md
See rust-lang#2612
1 parent 53c9604 commit 65b247c

File tree

1 file changed

+108
-0
lines changed

1 file changed

+108
-0
lines changed

src/book/summary.rs

+108
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ use log::{debug, trace, warn};
33
use memchr::Memchr;
44
use pulldown_cmark::{DefaultBrokenLinkCallback, Event, HeadingLevel, Tag, TagEnd};
55
use serde::{Deserialize, Serialize};
6+
use std::collections::HashSet;
67
use std::fmt::{self, Display, Formatter};
78
use std::ops::{Deref, DerefMut};
89
use std::path::{Path, PathBuf};
@@ -245,6 +246,11 @@ impl<'a> SummaryParser<'a> {
245246
.parse_affix(false)
246247
.with_context(|| "There was an error parsing the suffix chapters")?;
247248

249+
let mut files = HashSet::new();
250+
for part in [&prefix_chapters, &numbered_chapters, &suffix_chapters] {
251+
self.check_for_duplicates(&part, &mut files)?;
252+
}
253+
248254
Ok(Summary {
249255
title,
250256
prefix_chapters,
@@ -253,6 +259,29 @@ impl<'a> SummaryParser<'a> {
253259
})
254260
}
255261

262+
/// Recursively check for duplicate files in the summary items.
263+
fn check_for_duplicates<'b>(
264+
&self,
265+
items: &'b [SummaryItem],
266+
files: &mut HashSet<&'b PathBuf>,
267+
) -> Result<()> {
268+
for item in items {
269+
if let SummaryItem::Link(link) = item {
270+
if let Some(location) = &link.location {
271+
if !files.insert(location) {
272+
bail!(anyhow::anyhow!(
273+
"Duplicate file in SUMMARY.md: {:?}",
274+
location
275+
));
276+
}
277+
}
278+
// Recursively check nested items
279+
self.check_for_duplicates(&link.nested_items, files)?;
280+
}
281+
}
282+
Ok(())
283+
}
284+
256285
/// Parse the affix chapters.
257286
fn parse_affix(&mut self, is_prefix: bool) -> Result<Vec<SummaryItem>> {
258287
let mut items = Vec::new();
@@ -1127,4 +1156,83 @@ mod tests {
11271156
let got = parser.parse_affix(false).unwrap();
11281157
assert_eq!(got, should_be);
11291158
}
1159+
1160+
/// The simplest duplicate: the same chapter listed twice in a row.
#[test]
fn duplicate_entries_1() {
    let src = r#"
# Summary
- [A](./a.md)
- [A](./a.md)
"#;

    let error = parse_summary(src)
        .err()
        .expect("a duplicated file should be rejected");
    assert_eq!(
        error.to_string(),
        r#"Duplicate file in SUMMARY.md: "./a.md""#
    );
}
1173+
1174+
/// Two links with *different* titles that point at the same file are
/// still duplicates — detection keys on the location, not the title.
/// (Previously this test was a byte-for-byte copy of
/// `duplicate_entries_1` and exercised nothing new.)
#[test]
fn duplicate_entries_2() {
    let src = r#"
# Summary
- [A](./a.md)
- [B](./b.md)
- [C](./a.md)
"#;

    let res = parse_summary(src);
    assert!(res.is_err());
    let error_message = res.err().unwrap().to_string();
    assert_eq!(error_message, r#"Duplicate file in SUMMARY.md: "./a.md""#);
}
1187+
/// A duplicate is detected even when another entry sits between the two
/// references to the same file.
#[test]
fn duplicate_entries_3() {
    let src = r#"
# Summary
- [A](./a.md)
- [B](./b.md)
- [A](./a.md)
"#;

    let outcome = parse_summary(src);
    assert!(outcome.is_err());
    assert_eq!(
        outcome.err().unwrap().to_string(),
        r#"Duplicate file in SUMMARY.md: "./a.md""#
    );
}
1201+
1202+
/// A prefix chapter and a numbered chapter must not share a file.
#[test]
fn duplicate_entries_4() {
    let src = r#"
# Summary
[A](./a.md)
- [B](./b.md)
- [A](./a.md)
"#;

    let error = parse_summary(src)
        .err()
        .expect("prefix/numbered duplicate should be rejected");
    assert_eq!(
        error.to_string(),
        r#"Duplicate file in SUMMARY.md: "./a.md""#
    );
}
1216+
1217+
/// Duplicates are caught across parts: a prefix chapter and a suffix
/// chapter (after the `---` separator) referencing the same file is an
/// error.
#[test]
fn duplicate_entries_5() {
    let src = r#"
# Summary
[A](./a.md)

# hi
- [B](./b.md)

# bye

---

[A](./a.md)
"#;

    let parsed = parse_summary(src);
    assert!(parsed.is_err());
    let msg = parsed.err().unwrap().to_string();
    assert_eq!(msg, r#"Duplicate file in SUMMARY.md: "./a.md""#);
}
11301238
}

0 commit comments

Comments
 (0)