[librustdoc] Only split lang string on `,`, ` `, and `\t` #78429

Merged
merged 1 commit on Feb 26, 2021
2 changes: 1 addition & 1 deletion compiler/rustc_error_codes/src/error_codes/E0761.md
@@ -2,7 +2,7 @@ Multiple candidate files were found for an out-of-line module.

Erroneous code example:

```ignore (multiple source files required for compile_fail)
```ignore (Multiple source files are required for compile_fail.)
// file: ambiguous_module/mod.rs

fn foo() {}
4 changes: 2 additions & 2 deletions compiler/rustc_mir/src/dataflow/framework/mod.rs
@@ -10,7 +10,7 @@
//! fixpoint solution to your dataflow problem, or implement the `ResultsVisitor` interface and use
//! `visit_results`. The following example uses the `ResultsCursor` approach.
//!
//! ```ignore(cross-crate-imports)
//! ```ignore (cross-crate-imports)
//! use rustc_mir::dataflow::Analysis; // Makes `into_engine` available.
//!
//! fn do_my_analysis(tcx: TyCtxt<'tcx>, body: &mir::Body<'tcx>) {
@@ -211,7 +211,7 @@ pub trait Analysis<'tcx>: AnalysisDomain<'tcx> {
/// default impl and the one for all `A: GenKillAnalysis` will do the right thing.
/// Its purpose is to enable method chaining like so:
///
/// ```ignore(cross-crate-imports)
/// ```ignore (cross-crate-imports)
/// let results = MyAnalysis::new(tcx, body)
/// .into_engine(tcx, body, def_id)
/// .iterate_to_fixpoint()
8 changes: 4 additions & 4 deletions library/core/src/option.rs
@@ -336,7 +336,7 @@ impl<T> Option<T> {
/// assert_eq!(x.expect("fruits are healthy"), "value");
/// ```
///
/// ```{.should_panic}
/// ```should_panic
/// let x: Option<&str> = None;
/// x.expect("fruits are healthy"); // panics with `fruits are healthy`
/// ```
@@ -372,7 +372,7 @@ impl<T> Option<T> {
/// assert_eq!(x.unwrap(), "air");
/// ```
///
/// ```{.should_panic}
/// ```should_panic
/// let x: Option<&str> = None;
/// assert_eq!(x.unwrap(), "air"); // fails
/// ```
@@ -1114,7 +1114,7 @@ impl<T: fmt::Debug> Option<T> {
/// }
/// ```
///
/// ```{.should_panic}
/// ```should_panic
/// #![feature(option_expect_none)]
///
/// use std::collections::HashMap;
@@ -1156,7 +1156,7 @@ impl<T: fmt::Debug> Option<T> {
/// }
/// ```
///
/// ```{.should_panic}
/// ```should_panic
/// #![feature(option_unwrap_none)]
///
/// use std::collections::HashMap;
12 changes: 6 additions & 6 deletions library/core/src/result.rs
@@ -112,7 +112,7 @@
//! assert success with [`expect`]. This will panic if the
//! write fails, providing a marginally useful message indicating why:
//!
//! ```{.no_run}
//! ```no_run
//! use std::fs::File;
//! use std::io::prelude::*;
//!
@@ -122,7 +122,7 @@
//!
//! You might also simply assert success:
//!
//! ```{.no_run}
//! ```no_run
//! # use std::fs::File;
//! # use std::io::prelude::*;
//! # let mut file = File::create("valuable_data.txt").unwrap();
@@ -984,7 +984,7 @@ impl<T, E: fmt::Debug> Result<T, E> {
///
/// Basic usage:
///
/// ```{.should_panic}
/// ```should_panic
/// let x: Result<u32, &str> = Err("emergency failure");
/// x.expect("Testing expect"); // panics with `Testing expect: emergency failure`
/// ```
@@ -1024,7 +1024,7 @@ impl<T, E: fmt::Debug> Result<T, E> {
/// assert_eq!(x.unwrap(), 2);
/// ```
///
/// ```{.should_panic}
/// ```should_panic
/// let x: Result<u32, &str> = Err("emergency failure");
/// x.unwrap(); // panics with `emergency failure`
/// ```
@@ -1052,7 +1052,7 @@ impl<T: fmt::Debug, E> Result<T, E> {
///
/// Basic usage:
///
/// ```{.should_panic}
/// ```should_panic
/// let x: Result<u32, &str> = Ok(10);
/// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
/// ```
@@ -1075,7 +1075,7 @@ impl<T: fmt::Debug, E> Result<T, E> {
///
/// # Examples
///
/// ```{.should_panic}
/// ```should_panic
/// let x: Result<u32, &str> = Ok(2);
/// x.unwrap_err(); // panics with `2`
/// ```
32 changes: 29 additions & 3 deletions src/librustdoc/html/markdown.rs
@@ -779,6 +779,31 @@ impl LangString {
Self::parse(string, allow_error_code_check, enable_per_target_ignores, None)
}

fn tokens(string: &str) -> impl Iterator<Item = &str> {
// Pandoc, which Rust once used for generating documentation,
// expects lang strings to be surrounded by `{}` and for each token
// to be preceded by a `.`. Since some of these lang strings are still
// loose in the wild, we strip a pair of surrounding `{}` from the lang
// string and a leading `.` from each token.

let string = string.trim();

let first = string.chars().next();
let last = string.chars().last();

let string = if first == Some('{') && last == Some('}') {
&string[1..string.len() - 1]
} else {
string
};

string
.split(|c| c == ',' || c == ' ' || c == '\t')
.map(str::trim)
.map(|token| if token.chars().next() == Some('.') { &token[1..] } else { token })
.filter(|token| !token.is_empty())
}

fn parse(
string: &str,
allow_error_code_check: ErrorCodes,
@@ -792,11 +817,11 @@ impl LangString {
let mut ignores = vec![];

data.original = string.to_owned();
let tokens = string.split(|c: char| !(c == '_' || c == '-' || c.is_alphanumeric()));

let tokens = Self::tokens(string).collect::<Vec<&str>>();

for token in tokens {
match token.trim() {
"" => {}
match token {
"should_panic" => {
data.should_panic = true;
seen_rust_tags = !seen_other_tags;
Expand Down Expand Up @@ -893,6 +918,7 @@ impl LangString {
_ => seen_other_tags = true,
}
}

// ignore-foo overrides ignore
if !ignores.is_empty() {
data.ignore = Ignore::Some(ignores);
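For readers skimming the diff, here is a standalone sketch of the splitting rule that the new `tokens` helper above implements: trim the lang string, strip one surrounding Pandoc-style `{}` pair, split only on `,`, ` `, and `\t`, drop a leading `.` from each token, and discard empty tokens. The `split_lang_string` function and `main` driver are illustrative assumptions for this sketch, not part of the rustdoc API.

```rust
// Standalone sketch (not rustdoc's actual API) of the lang-string splitting rule.
fn split_lang_string(lang: &str) -> Vec<&str> {
    let lang = lang.trim();
    // Strip one pair of surrounding Pandoc-style braces, if both are present.
    let lang = lang
        .strip_prefix('{')
        .and_then(|s| s.strip_suffix('}'))
        .unwrap_or(lang);
    lang.split(|c| c == ',' || c == ' ' || c == '\t')
        .map(str::trim)
        // Drop a leading Pandoc-style `.` from each token.
        .map(|tok| tok.strip_prefix('.').unwrap_or(tok))
        .filter(|tok| !tok.is_empty())
        .collect()
}

fn main() {
    assert_eq!(split_lang_string("{.no_run .example}"), ["no_run", "example"]);
    assert_eq!(split_lang_string("text,no_run,"), ["text", "no_run"]);
    assert_eq!(split_lang_string("foo\t, bar"), ["foo", "bar"]);
}
```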
46 changes: 42 additions & 4 deletions src/librustdoc/html/markdown/tests.rs
@@ -58,6 +58,9 @@ fn test_lang_string_parse() {

t(Default::default());
t(LangString { original: "rust".into(), ..Default::default() });
t(LangString { original: ".rust".into(), ..Default::default() });
t(LangString { original: "{rust}".into(), ..Default::default() });
t(LangString { original: "{.rust}".into(), ..Default::default() });
t(LangString { original: "sh".into(), rust: false, ..Default::default() });
t(LangString { original: "ignore".into(), ignore: Ignore::All, ..Default::default() });
t(LangString {
@@ -75,16 +78,16 @@ fn test_lang_string_parse() {
..Default::default()
});
t(LangString { original: "allow_fail".into(), allow_fail: true, ..Default::default() });
t(LangString { original: "{.no_run .example}".into(), no_run: true, ..Default::default() });
t(LangString { original: "no_run,example".into(), no_run: true, ..Default::default() });
t(LangString {
original: "{.sh .should_panic}".into(),
original: "sh,should_panic".into(),
should_panic: true,
rust: false,
..Default::default()
});
t(LangString { original: "{.example .rust}".into(), ..Default::default() });
t(LangString { original: "example,rust".into(), ..Default::default() });
t(LangString {
original: "{.test_harness .rust}".into(),
original: "test_harness,.rust".into(),
test_harness: true,
..Default::default()
});
@@ -100,6 +103,18 @@ fn test_lang_string_parse() {
rust: false,
..Default::default()
});
t(LangString {
original: "text,no_run, ".into(),
no_run: true,
rust: false,
..Default::default()
});
t(LangString {
original: "text,no_run,".into(),
no_run: true,
rust: false,
..Default::default()
});
t(LangString {
original: "edition2015".into(),
edition: Some(Edition::Edition2015),
@@ -112,6 +127,29 @@
});
}

#[test]
fn test_lang_string_tokenizer() {
fn case(lang_string: &str, want: &[&str]) {
let have = LangString::tokens(lang_string).collect::<Vec<&str>>();
assert_eq!(have, want, "Unexpected lang string split for `{}`", lang_string);
}

case("", &[]);
case("foo", &["foo"]);
case("foo,bar", &["foo", "bar"]);
case(".foo,.bar", &["foo", "bar"]);
case("{.foo,.bar}", &["foo", "bar"]);
case(" {.foo,.bar} ", &["foo", "bar"]);
case("foo bar", &["foo", "bar"]);
case("foo\tbar", &["foo", "bar"]);
case("foo\t, bar", &["foo", "bar"]);
case(" foo , bar ", &["foo", "bar"]);
case(",,foo,,bar,,", &["foo", "bar"]);
case("foo=bar", &["foo=bar"]);
case("a-b-c", &["a-b-c"]);
case("a_b_c", &["a_b_c"]);
}

#[test]
fn test_header() {
fn t(input: &str, expect: &str) {