diff --git a/src/renderer/html_handlebars/hbs_renderer.rs b/src/renderer/html_handlebars/hbs_renderer.rs
index a545eed2a4..234dc92d1d 100644
--- a/src/renderer/html_handlebars/hbs_renderer.rs
+++ b/src/renderer/html_handlebars/hbs_renderer.rs
@@ -57,11 +57,11 @@ impl HtmlHandlebars {
let content = ch.content.clone();
let content = utils::render_markdown(&content, ctx.html_config.curly_quotes);
- let fixed_content = utils::render_markdown_with_path(
+ let fixed_content = utils::render_markdown_with_path_and_redirects(
&ch.content,
ctx.html_config.curly_quotes,
Some(path),
- ctx.html_config.redirect,
+ &ctx.html_config.redirect,
);
if !ctx.is_index && ctx.html_config.print.page_break {
// Add page break between chapters
@@ -73,7 +73,7 @@ impl HtmlHandlebars {
let path_id = {
let mut base = path.display().to_string();
if base.ends_with(".md") {
- base.replace_range(base.len() - 3.., "");
+ base.truncate(base.len() - 3);
}
&base
.replace("/", "-")
diff --git a/src/utils/mod.rs b/src/utils/mod.rs
index 9143da2a99..d9c161500c 100644
--- a/src/utils/mod.rs
+++ b/src/utils/mod.rs
@@ -146,7 +146,7 @@ pub fn normalize_path_id(mut path: String) -> String {
fn adjust_links<'a>(
event: Event<'a>,
path: Option<&Path>,
- redirects: HashMap<String, String>,
+ redirects: &HashMap<String, String>,
) -> Event<'a> {
static SCHEME_LINK: Lazy<Regex> = Lazy::new(|| Regex::new(r"^[a-z][a-z0-9+.-]*:").unwrap());
static HTML_MD_LINK: Lazy<Regex> =
@@ -176,14 +176,14 @@ fn adjust_links<'a>(
fn fix_a_links<'a>(
dest: CowStr<'a>,
path: Option<&Path>,
- redirects: HashMap<String, String>,
+ redirects: &HashMap<String, String>,
) -> CowStr<'a> {
if dest.starts_with('#') {
// Fragment-only link.
if let Some(path) = path {
let mut base = path.display().to_string();
if base.ends_with(".md") {
- base.replace_range(base.len() - 3.., "");
+ base.truncate(base.len() - 3);
}
return format!(
"#{}{}",
@@ -228,7 +228,7 @@ fn adjust_links<'a>(
if let Some(_) = path {
// Fix redirect links
let normalized_path_split: Vec<&str> = normalized_path.split('#').collect();
- for (original, redirect) in &redirects {
+ for (original, redirect) in redirects {
if normalize_path(original.trim_start_matches('/'))
.eq_ignore_ascii_case(&normalized_path)
|| normalize_path(original.trim_start_matches('/'))
@@ -295,7 +295,7 @@ fn adjust_links<'a>(
fn fix_html<'a>(
html: CowStr<'a>,
path: Option<&Path>,
- redirects: HashMap<String, String>,
+ redirects: &HashMap<String, String>,
) -> CowStr<'a> {
// This is a terrible hack, but should be reasonably reliable. Nobody
// should ever parse a tag with a regex. However, there isn't anything
@@ -319,7 +319,7 @@ fn adjust_links<'a>(
A_LINK
.replace_all(&temp_html, |caps: &regex::Captures<'_>| {
- let fixed = fix_a_links(caps[2].into(), path, redirects.clone());
+ let fixed = fix_a_links(caps[2].into(), path, &redirects);
format!("{}{}\"", &caps[1], fixed)
})
.into_owned()
@@ -342,7 +342,12 @@ fn adjust_links<'a>(
/// Wrapper around the pulldown-cmark parser for rendering markdown to HTML.
pub fn render_markdown(text: &str, curly_quotes: bool) -> String {
- render_markdown_with_path(text, curly_quotes, None, HashMap::new())
+ render_markdown_with_path(text, curly_quotes, None)
+}
+
+/// Wrapper around [`render_markdown_with_path_and_redirects`] for API compatibility.
+pub fn render_markdown_with_path(text: &str, curly_quotes: bool, path: Option<&Path>) -> String {
+ render_markdown_with_path_and_redirects(text, curly_quotes, path, &HashMap::new())
}

pub fn new_cmark_parser(text: &str, curly_quotes: bool) -> Parser<'_, '_> {
@@ -357,17 +362,17 @@ pub fn new_cmark_parser(text: &str, curly_quotes: bool) -> Parser<'_, '_> {
Parser::new_ext(text, opts)
}

-pub fn render_markdown_with_path(
+pub fn render_markdown_with_path_and_redirects(
text: &str,
curly_quotes: bool,
path: Option<&Path>,
- redirects: HashMap<String, String>,
+ redirects: &HashMap<String, String>,
) -> String {
let mut s = String::with_capacity(text.len() * 3 / 2);
let p = new_cmark_parser(text, curly_quotes);
let events = p
.map(clean_codeblock_headers)
- .map(|event| adjust_links(event, path, redirects.clone()))
+ .map(|event| adjust_links(event, path, &redirects))
.flat_map(|event| {
let (a, b) = wrap_tables(event);
a.into_iter().chain(b)