rustdoc/passes/lint/bare_urls.rs

//! Detects bare URLs in doc comments that are not rendered as clickable links
//! and suggests wrapping them in angle brackets so Markdown linkifies them.

use core::ops::Range;
use std::mem;
use std::sync::LazyLock;

use pulldown_cmark::{Event, Parser, Tag};
use regex::Regex;
use rustc_errors::Applicability;
use rustc_hir::HirId;
use rustc_resolve::rustdoc::source_span_for_markdown_range;
use tracing::trace;

use crate::clean::*;
use crate::core::DocContext;
use crate::html::markdown::main_body_opts;

pub(super) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &str) {
    let report_diag = |cx: &DocContext<'_>, msg: &'static str, range: Range<usize>| {
        let maybe_sp =
            source_span_for_markdown_range(cx.tcx, dox, &range, &item.attrs.doc_strings);
        let sp = maybe_sp.unwrap_or_else(|| item.attr_span(cx.tcx));
        cx.tcx.node_span_lint(crate::lint::BARE_URLS, hir_id, sp, |lint| {
            lint.primary_message(msg)
                .note("bare URLs are not automatically turned into clickable links");
            // Only offer the `<`/`>` suggestion when the exact source span of the
            // URL is known; the attribute-span fallback is fine for pointing at
            // the error, but the fix would wrap the whole attribute.
            if let Some(sp) = maybe_sp {
                lint.multipart_suggestion(
                    "use an automatic link instead",
                    vec![
                        (sp.shrink_to_lo(), "<".to_string()),
                        (sp.shrink_to_hi(), ">".to_string()),
                    ],
                    Applicability::MachineApplicable,
                );
            }
        });
    };

    let mut p = Parser::new_ext(dox, main_body_opts()).into_offset_iter();

    while let Some((event, range)) = p.next() {
        match event {
            Event::Text(s) => find_raw_urls(cx, &s, range, &report_diag),
            Event::Start(tag @ (Tag::CodeBlock(_) | Tag::Link { .. })) => {
                // Don't check text inside code blocks or links: skip ahead to
                // the matching `End` event.
                for (event, _) in p.by_ref() {
                    match event {
                        Event::End(end)
                            if mem::discriminant(&end) == mem::discriminant(&tag.to_end()) =>
                        {
                            break;
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }
}
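
// Illustrative sketch, not part of the original file: the kind of doc comment
// this pass flags and the machine-applicable fix it suggests (wrapping the URL
// in angle brackets). The item below is hypothetical.
//
// Before (warns: "this URL is not a hyperlink"):
//
//     /// See https://doc.rust-lang.org/rustdoc/ for details.
//     pub fn example() {}
//
// After applying the suggestion:
//
//     /// See <https://doc.rust-lang.org/rustdoc/> for details.
//     pub fn example() {}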

static URL_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    Regex::new(concat!(
        r"https?://",                          // url scheme
        r"([-a-zA-Z0-9@:%._\+~#=]{2,256}\.)+", // one or more subdomains
        r"[a-zA-Z]{2,63}",                     // root domain
        r"\b([-a-zA-Z0-9@:%_\+.~#?&/=]*)"      // optional path, query, or fragment
    ))
    .expect("failed to build regex")
});
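
// Illustrative sketch, not part of the original file: a few strings the
// pattern above does and does not match, assuming the regex exactly as
// written. The module and test names are hypothetical.
#[cfg(test)]
mod url_regex_examples {
    use super::URL_REGEX;

    #[test]
    fn matches_full_urls_only() {
        // Scheme, domain, and an optional path/query are all accepted.
        assert!(URL_REGEX.is_match("https://doc.rust-lang.org/std/?search=vec"));
        assert!(URL_REGEX.is_match("http://example.com"));
        // Without a leading `http://`/`https://` there is no match, so such
        // text is never reported by `find_raw_urls`.
        assert!(!URL_REGEX.is_match("doc.rust-lang.org/std/"));
    }
}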

fn find_raw_urls(
    cx: &DocContext<'_>,
    text: &str,
    range: Range<usize>,
    f: &impl Fn(&DocContext<'_>, &'static str, Range<usize>),
) {
    trace!("looking for raw urls in {text}");
    // Only "full" URLs (starting with `http://` or `https://`) are flagged.
    for match_ in URL_REGEX.find_iter(text) {
        let url_range = match_.range();
        // `range` is the offset of `text` within the whole doc string, so shift
        // the match into doc-string coordinates before reporting it.
        f(
            cx,
            "this URL is not a hyperlink",
            Range { start: range.start + url_range.start, end: range.start + url_range.end },
        );
    }
}
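
// Illustrative sketch, not part of the original file: how the match range is
// shifted by the event range in `find_raw_urls`. The module, test name, and
// `event_start` value are hypothetical.
#[cfg(test)]
mod offset_examples {
    use super::URL_REGEX;

    #[test]
    fn match_range_is_shifted_into_doc_string_coordinates() {
        // Suppose pulldown-cmark reported this text at offset 4 of the doc string.
        let text = "See https://example.com now";
        let event_start = 4;
        let m = URL_REGEX.find(text).expect("regex should match");
        // The URL occupies bytes 4..23 of `text` ...
        assert_eq!(m.range(), 4..23);
        // ... which `find_raw_urls` reports as 8..27 of the whole doc string.
        assert_eq!(event_start + m.start()..event_start + m.end(), 8..27);
    }
}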