Auto merge of #54861 - rep-nop:find_main_in_doctest, r=estebank
rustdoc: Replaces the `fn main` search and the `extern crate` search with proper parsing during doctests. Fixes #21299. Fixes #33731. Let me know if there are any additional changes you'd like made!
Commit: 6d69fe7a2f
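For context, the heart of the change is visible in the new test added below: a doctest whose only `fn main` appears inside a comment must still be wrapped, and an `extern crate` line must be detected by actually parsing the snippet rather than by string search. A minimal sketch of the resulting behaviour (this only builds inside rustdoc's own test module, since `make_test` and `TestOptions` are internal):

// Sketch: `fn main` only occurs in a comment, so rustdoc still wraps the body and
// reports a line offset of 2 (the injected `#![allow(unused)]` and `fn main() {` lines).
let opts = TestOptions::default();
let input = "// fn main\nassert_eq!(2+2, 4);";
let (wrapped, line_offset) = make_test(input, None, false, &opts);
assert!(wrapped.contains("fn main() {"));
assert_eq!(line_offset, 2);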
@@ -378,7 +378,7 @@ pub fn make_test(s: &str,
                  dont_insert_main: bool,
                  opts: &TestOptions)
                  -> (String, usize) {
-    let (crate_attrs, everything_else) = partition_source(s);
+    let (crate_attrs, everything_else, crates) = partition_source(s);
     let everything_else = everything_else.trim();
     let mut line_offset = 0;
     let mut prog = String::new();
@@ -402,10 +402,84 @@ pub fn make_test(s: &str,
     // are intended to be crate attributes.
     prog.push_str(&crate_attrs);

+    // Uses libsyntax to parse the doctest and find if there's a main fn and the extern
+    // crate already is included.
+    let (already_has_main, already_has_extern_crate) = crate::syntax::with_globals(|| {
+        use crate::syntax::{ast, parse::{self, ParseSess}, source_map::FilePathMapping};
+        use crate::syntax_pos::FileName;
+        use errors::emitter::EmitterWriter;
+        use errors::Handler;
+
+        let filename = FileName::Anon;
+        let source = crates + &everything_else;
+
+        // any errors in parsing should also appear when the doctest is compiled for real, so just
+        // send all the errors that libsyntax emits directly into a Sink instead of stderr
+        let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+        let emitter = EmitterWriter::new(box io::sink(), None, false, false);
+        let handler = Handler::with_emitter(false, false, box emitter);
+        let sess = ParseSess::with_span_handler(handler, cm);
+
+        debug!("about to parse: \n{}", source);
+
+        let mut found_main = false;
+        let mut found_extern_crate = cratename.is_none();
+
+        let mut parser = match parse::maybe_new_parser_from_source_str(&sess, filename, source) {
+            Ok(p) => p,
+            Err(errs) => {
+                for mut err in errs {
+                    err.cancel();
+                }
+
+                return (found_main, found_extern_crate);
+            }
+        };
+
+        loop {
+            match parser.parse_item() {
+                Ok(Some(item)) => {
+                    if !found_main {
+                        if let ast::ItemKind::Fn(..) = item.node {
+                            if item.ident.as_str() == "main" {
+                                found_main = true;
+                            }
+                        }
+                    }
+
+                    if !found_extern_crate {
+                        if let ast::ItemKind::ExternCrate(original) = item.node {
+                            // This code will never be reached if `cratename` is none because
+                            // `found_extern_crate` is initialized to `true` if it is none.
+                            let cratename = cratename.unwrap();
+
+                            match original {
+                                Some(name) => found_extern_crate = name.as_str() == cratename,
+                                None => found_extern_crate = item.ident.as_str() == cratename,
+                            }
+                        }
+                    }
+
+                    if found_main && found_extern_crate {
+                        break;
+                    }
+                }
+                Ok(None) => break,
+                Err(mut e) => {
+                    e.cancel();
+                    break;
+                }
+            }
+        }
+
+        (found_main, found_extern_crate)
+    });
+
     // Don't inject `extern crate std` because it's already injected by the
     // compiler.
-    if !s.contains("extern crate") && !opts.no_crate_inject && cratename != Some("std") {
+    if !already_has_extern_crate && !opts.no_crate_inject && cratename != Some("std") {
         if let Some(cratename) = cratename {
             // Make sure its actually used if not included.
             if s.contains(cratename) {
                 prog.push_str(&format!("extern crate {};\n", cratename));
                 line_offset += 1;
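For reference, the two arms of the `match original { .. }` above correspond to the two shapes an `extern crate` item can take in a doctest; `my_crate` here is only an illustrative name:

extern crate my_crate;              // ItemKind::ExternCrate(None): the item ident is the crate name
extern crate my_crate as renamed;   // ItemKind::ExternCrate(Some(name)): `name` holds the original crate name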
@@ -413,19 +487,6 @@ pub fn make_test(s: &str,
         }
     }

-    // FIXME (#21299): prefer libsyntax or some other actual parser over this
-    // best-effort ad hoc approach
-    let already_has_main = s.lines()
-        .map(|line| {
-            let comment = line.find("//");
-            if let Some(comment_begins) = comment {
-                &line[0..comment_begins]
-            } else {
-                line
-            }
-        })
-        .any(|code| code.contains("fn main"));
-
     if dont_insert_main || already_has_main {
         prog.push_str(everything_else);
     } else {
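The removed heuristic only stripped `//` comments before searching each line for the substring `fn main`, so a doctest like the following (an illustrative case, not taken from the diff) could be misclassified as already having a `main` function and end up not being wrapped:

let signature = "fn main";   // substring match, but there is no real `main` item here
println!("{}", signature);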
@@ -441,9 +502,10 @@ pub fn make_test(s: &str,
 }

 // FIXME(aburka): use a real parser to deal with multiline attributes
-fn partition_source(s: &str) -> (String, String) {
+fn partition_source(s: &str) -> (String, String, String) {
     let mut after_header = false;
     let mut before = String::new();
+    let mut crates = String::new();
     let mut after = String::new();

     for line in s.lines() {
@@ -457,12 +519,17 @@ fn partition_source(s: &str) -> (String, String) {
             after.push_str(line);
             after.push_str("\n");
         } else {
+            if trimline.starts_with("#[macro_use] extern crate")
+                || trimline.starts_with("extern crate") {
+                crates.push_str(line);
+                crates.push_str("\n");
+            }
             before.push_str(line);
             before.push_str("\n");
         }
     }

-    (before, after)
+    (before, after, crates)
 }

 pub trait Tester {
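To illustrate the new three-way split (a sketch with a made-up crate name, assuming the header-detection rules in the surrounding function are unchanged): crate attributes and `extern crate` lines stay in the header string as before, while `extern crate` lines are additionally copied into the new third string so the parsing pass in `make_test` can prepend them to the body it analyses.

let (before, after, crates) =
    partition_source("#![allow(unused)]\nextern crate foo;\nlet x = foo::bar();\n");
assert_eq!(before, "#![allow(unused)]\nextern crate foo;\n");
assert_eq!(after,  "let x = foo::bar();\n");
assert_eq!(crates, "extern crate foo;\n");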
@@ -1014,4 +1081,38 @@ assert_eq!(2+2, 4);
         let output = make_test(input, None, false, &opts);
         assert_eq!(output, (expected, 1));
     }
+
+    #[test]
+    fn make_test_issues_21299_33731() {
+        let opts = TestOptions::default();
+
+        let input =
+"// fn main
+assert_eq!(2+2, 4);";
+
+        let expected =
+"#![allow(unused)]
+fn main() {
+// fn main
+assert_eq!(2+2, 4);
+}".to_string();
+
+        let output = make_test(input, None, false, &opts);
+        assert_eq!(output, (expected, 2));
+
+        let input =
+"extern crate hella_qwop;
+assert_eq!(asdf::foo, 4);";
+
+        let expected =
+"#![allow(unused)]
+extern crate hella_qwop;
+extern crate asdf;
+fn main() {
+assert_eq!(asdf::foo, 4);
+}".to_string();
+
+        let output = make_test(input, Some("asdf"), false, &opts);
+        assert_eq!(output, (expected, 3));
+    }
 }
@@ -70,6 +70,23 @@ macro_rules! panictry {
     })
 }

+// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
+macro_rules! panictry_buffer {
+    ($handler:expr, $e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        use errors::{FatalError, DiagnosticBuilder};
+        match $e {
+            Ok(e) => e,
+            Err(errs) => {
+                for e in errs {
+                    DiagnosticBuilder::new_diagnostic($handler, e).emit();
+                }
+                FatalError.raise()
+            }
+        }
+    })
+}
+
 #[macro_export]
 macro_rules! unwrap_or {
     ($opt:expr, $default:expr) => {
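Usage is analogous to `panictry!`, except the fallible expression yields a `Vec<Diagnostic>` on failure; each buffered diagnostic is emitted through the given handler before a fatal error is raised. The call added to `new_parser_from_source_str` further down in this diff is the canonical example:

panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))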
@@ -11,7 +11,7 @@
 use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, DiagnosticBuilder};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -175,6 +175,16 @@ impl<'a> StringReader<'a> {
         self.fatal_errs.clear();
     }

+    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
+        let mut buffer = Vec::new();
+
+        for err in self.fatal_errs.drain(..) {
+            err.buffer(&mut buffer);
+        }
+
+        buffer
+    }
+
     pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
@@ -251,6 +261,17 @@ impl<'a> StringReader<'a> {
         Ok(sr)
     }

+    pub fn new_or_buffered_errs(sess: &'a ParseSess,
+                                source_file: Lrc<syntax_pos::SourceFile>,
+                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
+        if sr.advance_token().is_err() {
+            Err(sr.buffer_fatal_errors())
+        } else {
+            Ok(sr)
+        }
+    }
+
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
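The new constructor mirrors `StringReader::new`, but instead of emitting a fatal error when the initial token cannot be lexed it hands the buffered diagnostics back to the caller. The `maybe_file_to_stream` change later in this diff is the intended call site:

let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;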
@@ -15,7 +15,7 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -174,12 +174,21 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }

-// Create a new parser from a source string
+/// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
-    let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
+    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
+}
+
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
     parser.recurse_into_file_modules = false;
-    parser
+    Ok(parser)
 }

 /// Create a new parser, handling errors as appropriate
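Callers that want the old panicking behaviour keep using `new_parser_from_source_str`; callers like rustdoc that must not abort on a malformed doctest match on the result instead. A sketch of that pattern, mirroring the rustdoc change above:

match parse::maybe_new_parser_from_source_str(&sess, FileName::Anon, source) {
    Ok(parser) => {
        // lexing succeeded; drive the parser as usual
    }
    Err(errs) => {
        // lexing failed; the diagnostics were buffered rather than emitted,
        // so the caller decides what to do with them (rustdoc cancels them)
        for mut err in errs {
            err.cancel();
        }
    }
}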
@@ -204,14 +213,23 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,

 /// Given a source_file and config, return a parser
 fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+    panictry_buffer!(&sess.span_diagnostic,
+                     maybe_source_file_to_parser(sess, source_file))
+}
+
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);

     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }

-    parser
+    Ok(parser)
 }

 // must preserve old name for now, because quote! from the *existing*
@@ -243,9 +261,25 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 pub fn source_file_to_stream(sess: &ParseSess,
                              source_file: Lrc<SourceFile>,
                              override_span: Option<Span>) -> TokenStream {
-    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
+    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
+}
+
+/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// parsing the token tream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
-    panictry!(srdr.parse_all_token_trees())
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
 }

 /// Given stream and the `ParseSess`, produce a parser
@@ -10,6 +10,7 @@

 #![feature(rustc_private)]

+extern crate env_logger;
 extern crate syntax;
 extern crate rustdoc;
 extern crate serialize as rustc_serialize;
@@ -264,6 +265,7 @@ fn parse_args() -> (OutputFormat, PathBuf) {
 }

 fn main() {
+    env_logger::init();
     PLAYGROUND.with(|slot| {
         *slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/")));
     });