mirror of
https://github.com/rust-lang/rust.git
synced 2024-11-26 16:54:01 +00:00
Auto merge of #65324 - Centril:organize-syntax, r=petrochenkov
Split libsyntax apart In this PR the general idea is to separate the AST, parser, and friends by a more data / logic structure (tho not fully realized!) by separating out the parser and macro expansion code from libsyntax. Specifically have now three crates instead of one (libsyntax): - libsyntax: - concrete syntax tree (`syntax::ast`) - definition of tokens and token-streams (`syntax::{token, tokenstream}`) -- used by `syntax::ast` - visitors (`syntax::visit`, `syntax::mut_visit`) - shared definitions between `libsyntax_expand` - feature gating (`syntax::feature_gate`) -- we could possibly move this out to its own crater later. - attribute and meta item utilities, including used-marking (`syntax::attr`) - pretty printer (`syntax::print`) -- this should possibly be moved out later. For now I've reduced down the dependencies to a single essential one which could be broken via `ParseSess`. This entails that e.g. `Debug` impls for `Path` cannot reference the pretty printer. - definition of `ParseSess` (`syntax::sess`) -- this is used by `syntax::{attr, print, feature_gate}` and is a common definition used by the parser and other things like librustc. - the `syntax::source_map` -- this includes definitions used by `syntax::ast` and other things but could ostensibly be moved `syntax_pos` since that is more related to this module. - a smattering of misc utilities not sufficiently important to itemize -- some of these could be moved to where they are used (often a single place) but I wanted to limit the scope of this PR. - librustc_parse: - parser (`rustc_parse::parser`) -- reading a file and such are defined in the crate root tho. - lexer (`rustc_parse::lexer`) - validation of meta grammar (post-expansion) in (`rustc_parse::validate_attr`) - libsyntax_expand -- this defines the infra for macro expansion and conditional compilation but this is not libsyntax_ext; we might want to merge them later but currently libsyntax_expand is depended on by librustc_metadata which libsyntax_ext is not. - conditional compilation (`syntax_expand::config`) -- moved from `syntax::config` to here - the bulk of this crate is made up of the old `syntax::ext` r? @estebank
This commit is contained in:
commit
a3b6e5705c
26
Cargo.lock
26
Cargo.lock
@ -3504,6 +3504,7 @@ dependencies = [
|
||||
"rustc_lint",
|
||||
"rustc_metadata",
|
||||
"rustc_mir",
|
||||
"rustc_parse",
|
||||
"rustc_plugin",
|
||||
"rustc_plugin_impl",
|
||||
"rustc_resolve",
|
||||
@ -3572,6 +3573,7 @@ dependencies = [
|
||||
"rustc_lint",
|
||||
"rustc_metadata",
|
||||
"rustc_mir",
|
||||
"rustc_parse",
|
||||
"rustc_passes",
|
||||
"rustc_plugin_impl",
|
||||
"rustc_privacy",
|
||||
@ -3649,6 +3651,7 @@ dependencies = [
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_index",
|
||||
"rustc_parse",
|
||||
"rustc_target",
|
||||
"serialize",
|
||||
"smallvec 1.0.0",
|
||||
@ -3692,6 +3695,21 @@ dependencies = [
|
||||
"core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_parse"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"log",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_lexer",
|
||||
"rustc_target",
|
||||
"smallvec 1.0.0",
|
||||
"syntax",
|
||||
"syntax_pos",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc_passes"
|
||||
version = "0.0.0"
|
||||
@ -3701,6 +3719,7 @@ dependencies = [
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_index",
|
||||
"rustc_parse",
|
||||
"rustc_target",
|
||||
"syntax",
|
||||
"syntax_pos",
|
||||
@ -3763,6 +3782,7 @@ dependencies = [
|
||||
"rustc",
|
||||
"rustc_codegen_utils",
|
||||
"rustc_data_structures",
|
||||
"rustc_parse",
|
||||
"serde_json",
|
||||
"syntax",
|
||||
"syntax_pos",
|
||||
@ -4372,14 +4392,11 @@ dependencies = [
|
||||
name = "syntax_expand"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"lazy_static 1.3.0",
|
||||
"log",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_index",
|
||||
"rustc_lexer",
|
||||
"scoped-tls",
|
||||
"rustc_parse",
|
||||
"serialize",
|
||||
"smallvec 1.0.0",
|
||||
"syntax",
|
||||
@ -4394,6 +4411,7 @@ dependencies = [
|
||||
"log",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_parse",
|
||||
"rustc_target",
|
||||
"smallvec 1.0.0",
|
||||
"syntax",
|
||||
|
@ -26,7 +26,7 @@ use syntax::expand::allocator::AllocatorKind;
|
||||
use syntax::feature_gate::{self, AttributeType};
|
||||
use syntax::json::JsonEmitter;
|
||||
use syntax::source_map;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::sess::{ParseSess, ProcessCfgMod};
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::{MultiSpan, Span};
|
||||
use crate::util::profiling::{SelfProfiler, SelfProfilerRef};
|
||||
@ -934,6 +934,7 @@ pub fn build_session(
|
||||
sopts: config::Options,
|
||||
local_crate_source_file: Option<PathBuf>,
|
||||
registry: errors::registry::Registry,
|
||||
process_cfg_mod: ProcessCfgMod,
|
||||
) -> Session {
|
||||
let file_path_mapping = sopts.file_path_mapping();
|
||||
|
||||
@ -944,6 +945,7 @@ pub fn build_session(
|
||||
Lrc::new(source_map::SourceMap::new(file_path_mapping)),
|
||||
DiagnosticOutput::Default,
|
||||
Default::default(),
|
||||
process_cfg_mod,
|
||||
)
|
||||
}
|
||||
|
||||
@ -1022,6 +1024,7 @@ pub fn build_session_with_source_map(
|
||||
source_map: Lrc<source_map::SourceMap>,
|
||||
diagnostics_output: DiagnosticOutput,
|
||||
lint_caps: FxHashMap<lint::LintId, lint::Level>,
|
||||
process_cfg_mod: ProcessCfgMod,
|
||||
) -> Session {
|
||||
// FIXME: This is not general enough to make the warning lint completely override
|
||||
// normal diagnostic warnings, since the warning lint can also be denied and changed
|
||||
@ -1062,7 +1065,14 @@ pub fn build_session_with_source_map(
|
||||
},
|
||||
);
|
||||
|
||||
build_session_(sopts, local_crate_source_file, diagnostic_handler, source_map, lint_caps)
|
||||
build_session_(
|
||||
sopts,
|
||||
local_crate_source_file,
|
||||
diagnostic_handler,
|
||||
source_map,
|
||||
lint_caps,
|
||||
process_cfg_mod,
|
||||
)
|
||||
}
|
||||
|
||||
fn build_session_(
|
||||
@ -1071,6 +1081,7 @@ fn build_session_(
|
||||
span_diagnostic: errors::Handler,
|
||||
source_map: Lrc<source_map::SourceMap>,
|
||||
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
|
||||
process_cfg_mod: ProcessCfgMod,
|
||||
) -> Session {
|
||||
let self_profiler =
|
||||
if let SwitchWithOptPath::Enabled(ref d) = sopts.debugging_opts.self_profile {
|
||||
@ -1109,6 +1120,7 @@ fn build_session_(
|
||||
let parse_sess = ParseSess::with_span_handler(
|
||||
span_diagnostic,
|
||||
source_map,
|
||||
process_cfg_mod,
|
||||
);
|
||||
let sysroot = match &sopts.maybe_sysroot {
|
||||
Some(sysroot) => sysroot.clone(),
|
||||
|
@ -21,6 +21,7 @@ rustc_data_structures = { path = "../librustc_data_structures" }
|
||||
errors = { path = "../librustc_errors", package = "rustc_errors" }
|
||||
rustc_metadata = { path = "../librustc_metadata" }
|
||||
rustc_mir = { path = "../librustc_mir" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
rustc_plugin = { path = "../librustc_plugin/deprecated" } # To get this in the sysroot
|
||||
rustc_plugin_impl = { path = "../librustc_plugin" }
|
||||
rustc_save_analysis = { path = "../librustc_save_analysis" }
|
||||
|
@ -63,7 +63,6 @@ use std::time::Instant;
|
||||
use syntax::ast;
|
||||
use syntax::source_map::FileLoader;
|
||||
use syntax::feature_gate::{GatedCfg, UnstableFeatures};
|
||||
use syntax::parse;
|
||||
use syntax::symbol::sym;
|
||||
use syntax_pos::{DUMMY_SP, FileName};
|
||||
|
||||
@ -1062,14 +1061,16 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
|
||||
}
|
||||
|
||||
fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, Vec<ast::Attribute>> {
|
||||
match *input {
|
||||
Input::File(ref ifile) => {
|
||||
parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess)
|
||||
match input {
|
||||
Input::File(ifile) => {
|
||||
rustc_parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess)
|
||||
}
|
||||
Input::Str { ref name, ref input } => {
|
||||
parse::parse_crate_attrs_from_source_str(name.clone(),
|
||||
input.clone(),
|
||||
&sess.parse_sess)
|
||||
Input::Str { name, input } => {
|
||||
rustc_parse::parse_crate_attrs_from_source_str(
|
||||
name.clone(),
|
||||
input.clone(),
|
||||
&sess.parse_sess,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -16,6 +16,7 @@ smallvec = { version = "1.0", features = ["union", "may_dangle"] }
|
||||
syntax = { path = "../libsyntax" }
|
||||
syntax_ext = { path = "../libsyntax_ext" }
|
||||
syntax_expand = { path = "../libsyntax_expand" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
rustc_serialize = { path = "../libserialize", package = "serialize" }
|
||||
rustc = { path = "../librustc" }
|
||||
|
@ -11,14 +11,15 @@ use rustc_codegen_utils::codegen_backend::CodegenBackend;
|
||||
use rustc_data_structures::OnDrop;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||
use rustc_parse::new_parser_from_source_str;
|
||||
use std::path::PathBuf;
|
||||
use std::result;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use syntax::{self, parse};
|
||||
use syntax::ast::{self, MetaItemKind};
|
||||
use syntax::token;
|
||||
use syntax::source_map::{FileName, FileLoader, SourceMap};
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax_expand::config::process_configure_mod;
|
||||
use syntax_pos::edition;
|
||||
|
||||
pub type Result<T> = result::Result<T, ErrorReported>;
|
||||
@ -64,9 +65,9 @@ impl Compiler {
|
||||
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
|
||||
syntax::with_default_globals(move || {
|
||||
let cfg = cfgspecs.into_iter().map(|s| {
|
||||
let sess = ParseSess::with_silent_emitter();
|
||||
let sess = ParseSess::with_silent_emitter(process_configure_mod);
|
||||
let filename = FileName::cfg_spec_source_code(&s);
|
||||
let mut parser = parse::new_parser_from_source_str(&sess, filename, s.to_string());
|
||||
let mut parser = new_parser_from_source_str(&sess, filename, s.to_string());
|
||||
|
||||
macro_rules! error {($reason: expr) => {
|
||||
early_error(ErrorOutputType::default(),
|
||||
|
@ -27,6 +27,7 @@ use rustc_errors::PResult;
|
||||
use rustc_incremental;
|
||||
use rustc_metadata::cstore;
|
||||
use rustc_mir as mir;
|
||||
use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str};
|
||||
use rustc_passes::{self, ast_validation, hir_stats, layout_test};
|
||||
use rustc_plugin as plugin;
|
||||
use rustc_plugin::registry::Registry;
|
||||
@ -38,7 +39,6 @@ use syntax::{self, ast, visit};
|
||||
use syntax::early_buffered_lints::BufferedEarlyLint;
|
||||
use syntax_expand::base::{NamedSyntaxExtension, ExtCtxt};
|
||||
use syntax::mut_visit::MutVisitor;
|
||||
use syntax::parse;
|
||||
use syntax::util::node_count::NodeCounter;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::FileName;
|
||||
@ -61,12 +61,11 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
|
||||
let krate = time(sess, "parsing", || {
|
||||
let _prof_timer = sess.prof.generic_activity("parse_crate");
|
||||
|
||||
match *input {
|
||||
Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
|
||||
Input::Str {
|
||||
ref input,
|
||||
ref name,
|
||||
} => parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess),
|
||||
match input {
|
||||
Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
|
||||
Input::Str { input, name } => {
|
||||
parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess)
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
@ -182,7 +181,7 @@ pub fn register_plugins<'a>(
|
||||
)
|
||||
});
|
||||
|
||||
let (krate, features) = syntax::config::features(
|
||||
let (krate, features) = syntax_expand::config::features(
|
||||
krate,
|
||||
&sess.parse_sess,
|
||||
sess.edition(),
|
||||
@ -489,7 +488,7 @@ pub fn lower_to_hir(
|
||||
) -> Result<hir::map::Forest> {
|
||||
// Lower AST to HIR.
|
||||
let hir_forest = time(sess, "lowering AST -> HIR", || {
|
||||
let nt_to_tokenstream = syntax::parse::nt_to_tokenstream;
|
||||
let nt_to_tokenstream = rustc_parse::nt_to_tokenstream;
|
||||
let hir_crate = lower_crate(sess, &dep_graph, &krate, resolver, nt_to_tokenstream);
|
||||
|
||||
if sess.opts.debugging_opts.hir_stats {
|
||||
|
@ -8,7 +8,7 @@ use rustc::session::config::{build_configuration, build_session_options, to_crat
|
||||
use rustc::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
|
||||
use rustc::session::config::{Externs, OutputType, OutputTypes, SymbolManglingVersion};
|
||||
use rustc::session::config::{rustc_optgroups, Options, ErrorOutputType, Passes};
|
||||
use rustc::session::build_session;
|
||||
use rustc::session::{build_session, Session};
|
||||
use rustc::session::search_paths::SearchPath;
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::iter::FromIterator;
|
||||
@ -17,16 +17,23 @@ use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel};
|
||||
use syntax::symbol::sym;
|
||||
use syntax::edition::{Edition, DEFAULT_EDITION};
|
||||
use syntax;
|
||||
use syntax_expand::config::process_configure_mod;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::{ColorConfig, emitter::HumanReadableErrorType, registry};
|
||||
|
||||
pub fn build_session_options_and_crate_config(
|
||||
matches: &getopts::Matches,
|
||||
) -> (Options, FxHashSet<(String, Option<String>)>) {
|
||||
(
|
||||
build_session_options(matches),
|
||||
parse_cfgspecs(matches.opt_strs("cfg")),
|
||||
)
|
||||
type CfgSpecs = FxHashSet<(String, Option<String>)>;
|
||||
|
||||
fn build_session_options_and_crate_config(matches: getopts::Matches) -> (Options, CfgSpecs) {
|
||||
let sessopts = build_session_options(&matches);
|
||||
let cfg = parse_cfgspecs(matches.opt_strs("cfg"));
|
||||
(sessopts, cfg)
|
||||
}
|
||||
|
||||
fn mk_session(matches: getopts::Matches) -> (Session, CfgSpecs) {
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
|
||||
let sess = build_session(sessopts, None, registry, process_configure_mod);
|
||||
(sess, cfg)
|
||||
}
|
||||
|
||||
fn new_public_extern_entry<S, I>(locations: I) -> ExternEntry
|
||||
@ -59,31 +66,19 @@ fn mk_map<K: Ord, V>(entries: Vec<(K, V)>) -> BTreeMap<K, V> {
|
||||
#[test]
|
||||
fn test_switch_implies_cfg_test() {
|
||||
syntax::with_default_globals(|| {
|
||||
let matches = &match optgroups().parse(&["--test".to_string()]) {
|
||||
Ok(m) => m,
|
||||
Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
|
||||
};
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let matches = optgroups().parse(&["--test".to_string()]).unwrap();
|
||||
let (sess, cfg) = mk_session(matches);
|
||||
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
||||
assert!(cfg.contains(&(sym::test, None)));
|
||||
});
|
||||
}
|
||||
|
||||
// When the user supplies --test and --cfg test, don't implicitly add
|
||||
// another --cfg test
|
||||
// When the user supplies --test and --cfg test, don't implicitly add another --cfg test
|
||||
#[test]
|
||||
fn test_switch_implies_cfg_test_unless_cfg_test() {
|
||||
syntax::with_default_globals(|| {
|
||||
let matches = &match optgroups().parse(&["--test".to_string(),
|
||||
"--cfg=test".to_string()]) {
|
||||
Ok(m) => m,
|
||||
Err(f) => panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f),
|
||||
};
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let matches = optgroups().parse(&["--test".to_string(), "--cfg=test".to_string()]).unwrap();
|
||||
let (sess, cfg) = mk_session(matches);
|
||||
let cfg = build_configuration(&sess, to_crate_config(cfg));
|
||||
let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
|
||||
assert!(test_items.next().is_some());
|
||||
@ -95,9 +90,7 @@ fn test_switch_implies_cfg_test_unless_cfg_test() {
|
||||
fn test_can_print_warnings() {
|
||||
syntax::with_default_globals(|| {
|
||||
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let (sess, _) = mk_session(matches);
|
||||
assert!(!sess.diagnostic().can_emit_warnings());
|
||||
});
|
||||
|
||||
@ -105,17 +98,13 @@ fn test_can_print_warnings() {
|
||||
let matches = optgroups()
|
||||
.parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
|
||||
.unwrap();
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let (sess, _) = mk_session(matches);
|
||||
assert!(sess.diagnostic().can_emit_warnings());
|
||||
});
|
||||
|
||||
syntax::with_default_globals(|| {
|
||||
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
|
||||
let registry = registry::Registry::new(&[]);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let sess = build_session(sessopts, None, registry);
|
||||
let (sess, _) = mk_session(matches);
|
||||
assert!(sess.diagnostic().can_emit_warnings());
|
||||
});
|
||||
}
|
||||
@ -704,6 +693,6 @@ fn test_edition_parsing() {
|
||||
let matches = optgroups()
|
||||
.parse(&["--edition=2018".to_string()])
|
||||
.unwrap();
|
||||
let (sessopts, _) = build_session_options_and_crate_config(&matches);
|
||||
let (sessopts, _) = build_session_options_and_crate_config(matches);
|
||||
assert!(sessopts.edition == Edition::Edition2018)
|
||||
}
|
||||
|
@ -36,6 +36,7 @@ use syntax::util::lev_distance::find_best_match_for_name;
|
||||
use syntax::source_map::{FileLoader, RealFileLoader, SourceMap};
|
||||
use syntax::symbol::{Symbol, sym};
|
||||
use syntax::{self, ast, attr};
|
||||
use syntax_expand::config::process_configure_mod;
|
||||
use syntax_pos::edition::Edition;
|
||||
#[cfg(not(parallel_compiler))]
|
||||
use std::{thread, panic};
|
||||
@ -49,6 +50,7 @@ pub fn diagnostics_registry() -> Registry {
|
||||
// FIXME: need to figure out a way to get these back in here
|
||||
// all_errors.extend_from_slice(get_codegen_backend(sess).diagnostics());
|
||||
all_errors.extend_from_slice(&rustc_metadata::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_parse::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_passes::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_plugin::error_codes::DIAGNOSTICS);
|
||||
all_errors.extend_from_slice(&rustc_mir::error_codes::DIAGNOSTICS);
|
||||
@ -103,6 +105,7 @@ pub fn create_session(
|
||||
source_map.clone(),
|
||||
diagnostic_output,
|
||||
lint_caps,
|
||||
process_configure_mod,
|
||||
);
|
||||
|
||||
let codegen_backend = get_codegen_backend(&sess);
|
||||
|
@ -1,7 +1,7 @@
|
||||
//! Low-level Rust lexer.
|
||||
//!
|
||||
//! Tokens produced by this lexer are not yet ready for parsing the Rust syntax,
|
||||
//! for that see `libsyntax::parse::lexer`, which converts this basic token stream
|
||||
//! for that see `librustc_parse::lexer`, which converts this basic token stream
|
||||
//! into wide tokens used by actual parser.
|
||||
//!
|
||||
//! The purpose of this crate is to convert raw sources into a labeled sequence
|
||||
|
@ -23,4 +23,5 @@ rustc_serialize = { path = "../libserialize", package = "serialize" }
|
||||
stable_deref_trait = "1.0.0"
|
||||
syntax = { path = "../libsyntax" }
|
||||
syntax_expand = { path = "../libsyntax_expand" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
|
@ -18,6 +18,8 @@ use rustc::hir::map::{DefKey, DefPath, DefPathHash};
|
||||
use rustc::hir::map::definitions::DefPathTable;
|
||||
use rustc::util::nodemap::DefIdMap;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_parse::source_file_to_stream;
|
||||
use rustc_parse::parser::emit_unclosed_delims;
|
||||
|
||||
use smallvec::SmallVec;
|
||||
use std::any::Any;
|
||||
@ -27,8 +29,6 @@ use std::sync::Arc;
|
||||
use syntax::ast;
|
||||
use syntax::attr;
|
||||
use syntax::source_map;
|
||||
use syntax::parse::source_file_to_stream;
|
||||
use syntax::parse::parser::emit_unclosed_delims;
|
||||
use syntax::source_map::Spanned;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::{Span, FileName};
|
||||
|
21
src/librustc_parse/Cargo.toml
Normal file
21
src/librustc_parse/Cargo.toml
Normal file
@ -0,0 +1,21 @@
|
||||
[package]
|
||||
authors = ["The Rust Project Developers"]
|
||||
name = "rustc_parse"
|
||||
version = "0.0.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
name = "rustc_parse"
|
||||
path = "lib.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
bitflags = "1.0"
|
||||
log = "0.4"
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
syntax = { path = "../libsyntax" }
|
||||
errors = { path = "../librustc_errors", package = "rustc_errors" }
|
||||
rustc_data_structures = { path = "../librustc_data_structures" }
|
||||
rustc_lexer = { path = "../librustc_lexer" }
|
||||
rustc_target = { path = "../librustc_target" }
|
||||
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
|
174
src/librustc_parse/error_codes.rs
Normal file
174
src/librustc_parse/error_codes.rs
Normal file
@ -0,0 +1,174 @@
|
||||
// Error messages for EXXXX errors.
|
||||
// Each message should start and end with a new line, and be wrapped to 80
|
||||
// characters. In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use
|
||||
// `:set tw=0` to disable.
|
||||
syntax::register_diagnostics! {
|
||||
|
||||
E0178: r##"
|
||||
In types, the `+` type operator has low precedence, so it is often necessary
|
||||
to use parentheses.
|
||||
|
||||
For example:
|
||||
|
||||
```compile_fail,E0178
|
||||
trait Foo {}
|
||||
|
||||
struct Bar<'a> {
|
||||
w: &'a Foo + Copy, // error, use &'a (Foo + Copy)
|
||||
x: &'a Foo + 'a, // error, use &'a (Foo + 'a)
|
||||
y: &'a mut Foo + 'a, // error, use &'a mut (Foo + 'a)
|
||||
z: fn() -> Foo + 'a, // error, use fn() -> (Foo + 'a)
|
||||
}
|
||||
```
|
||||
|
||||
More details can be found in [RFC 438].
|
||||
|
||||
[RFC 438]: https://github.com/rust-lang/rfcs/pull/438
|
||||
"##,
|
||||
|
||||
E0583: r##"
|
||||
A file wasn't found for an out-of-line module.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```ignore (compile_fail not working here; see Issue #43707)
|
||||
mod file_that_doesnt_exist; // error: file not found for module
|
||||
|
||||
fn main() {}
|
||||
```
|
||||
|
||||
Please be sure that a file corresponding to the module exists. If you
|
||||
want to use a module named `file_that_doesnt_exist`, you need to have a file
|
||||
named `file_that_doesnt_exist.rs` or `file_that_doesnt_exist/mod.rs` in the
|
||||
same directory.
|
||||
"##,
|
||||
|
||||
E0584: r##"
|
||||
A doc comment that is not attached to anything has been encountered.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0584
|
||||
trait Island {
|
||||
fn lost();
|
||||
|
||||
/// I'm lost!
|
||||
}
|
||||
```
|
||||
|
||||
A little reminder: a doc comment has to be placed before the item it's supposed
|
||||
to document. So if you want to document the `Island` trait, you need to put a
|
||||
doc comment before it, not inside it. Same goes for the `lost` method: the doc
|
||||
comment needs to be before it:
|
||||
|
||||
```
|
||||
/// I'm THE island!
|
||||
trait Island {
|
||||
/// I'm lost!
|
||||
fn lost();
|
||||
}
|
||||
```
|
||||
"##,
|
||||
|
||||
E0585: r##"
|
||||
A documentation comment that doesn't document anything was found.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0585
|
||||
fn main() {
|
||||
// The following doc comment will fail:
|
||||
/// This is a useless doc comment!
|
||||
}
|
||||
```
|
||||
|
||||
Documentation comments need to be followed by items, including functions,
|
||||
types, modules, etc. Examples:
|
||||
|
||||
```
|
||||
/// I'm documenting the following struct:
|
||||
struct Foo;
|
||||
|
||||
/// I'm documenting the following function:
|
||||
fn foo() {}
|
||||
```
|
||||
"##,
|
||||
|
||||
E0586: r##"
|
||||
An inclusive range was used with no end.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0586
|
||||
fn main() {
|
||||
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
|
||||
let x = &tmp[1..=]; // error: inclusive range was used with no end
|
||||
}
|
||||
```
|
||||
|
||||
An inclusive range needs an end in order to *include* it. If you just need a
|
||||
start and no end, use a non-inclusive range (with `..`):
|
||||
|
||||
```
|
||||
fn main() {
|
||||
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
|
||||
let x = &tmp[1..]; // ok!
|
||||
}
|
||||
```
|
||||
|
||||
Or put an end to your inclusive range:
|
||||
|
||||
```
|
||||
fn main() {
|
||||
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
|
||||
let x = &tmp[1..=3]; // ok!
|
||||
}
|
||||
```
|
||||
"##,
|
||||
|
||||
E0704: r##"
|
||||
This error indicates that a incorrect visibility restriction was specified.
|
||||
|
||||
Example of erroneous code:
|
||||
|
||||
```compile_fail,E0704
|
||||
mod foo {
|
||||
pub(foo) struct Bar {
|
||||
x: i32
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To make struct `Bar` only visible in module `foo` the `in` keyword should be
|
||||
used:
|
||||
```
|
||||
mod foo {
|
||||
pub(in crate::foo) struct Bar {
|
||||
x: i32
|
||||
}
|
||||
}
|
||||
# fn main() {}
|
||||
```
|
||||
|
||||
For more information see the Rust Reference on [Visibility].
|
||||
|
||||
[Visibility]: https://doc.rust-lang.org/reference/visibility-and-privacy.html
|
||||
"##,
|
||||
|
||||
E0743: r##"
|
||||
C-variadic has been used on a non-foreign function.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0743
|
||||
fn foo2(x: u8, ...) {} // error!
|
||||
```
|
||||
|
||||
Only foreign functions can use C-variadic (`...`). It is used to give an
|
||||
undefined number of parameters to a given function (like `printf` in C). The
|
||||
equivalent in Rust would be to use macros directly.
|
||||
"##,
|
||||
|
||||
;
|
||||
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
use crate::token::{self, Token, TokenKind};
|
||||
use crate::sess::ParseSess;
|
||||
use crate::symbol::{sym, Symbol};
|
||||
use crate::util::comments;
|
||||
use syntax::token::{self, Token, TokenKind};
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::symbol::{sym, Symbol};
|
||||
use syntax::util::comments;
|
||||
|
||||
use errors::{FatalError, DiagnosticBuilder};
|
||||
use syntax_pos::{BytePos, Pos, Span};
|
||||
@ -13,9 +13,6 @@ use std::convert::TryInto;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use log::debug;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
mod tokentrees;
|
||||
mod unicode_chars;
|
||||
mod unescape_error_reporting;
|
||||
@ -35,7 +32,8 @@ pub struct StringReader<'a> {
|
||||
/// Initial position, read-only.
|
||||
start_pos: BytePos,
|
||||
/// The absolute offset within the source_map of the current character.
|
||||
pos: BytePos,
|
||||
// FIXME(#64197): `pub` is needed by tests for now.
|
||||
pub pos: BytePos,
|
||||
/// Stop reading src at this index.
|
||||
end_src_index: usize,
|
||||
/// Source text to tokenize.
|
@ -3,9 +3,9 @@ use syntax_pos::Span;
|
||||
|
||||
use super::{StringReader, UnmatchedBrace};
|
||||
|
||||
use crate::print::pprust::token_to_string;
|
||||
use crate::token::{self, Token};
|
||||
use crate::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint};
|
||||
use syntax::print::pprust::token_to_string;
|
||||
use syntax::token::{self, Token};
|
||||
use syntax::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint};
|
||||
|
||||
use errors::PResult;
|
||||
|
@ -6,7 +6,7 @@ use std::iter::once;
|
||||
use rustc_lexer::unescape::{EscapeError, Mode};
|
||||
use syntax_pos::{Span, BytePos};
|
||||
|
||||
use crate::errors::{Handler, Applicability};
|
||||
use syntax::errors::{Handler, Applicability};
|
||||
|
||||
pub(crate) fn emit_unescape_error(
|
||||
handler: &Handler,
|
@ -1,11 +1,12 @@
|
||||
//! The main parser interface.
|
||||
|
||||
use crate::ast;
|
||||
use crate::parse::parser::{Parser, emit_unclosed_delims, make_unclosed_delims_error};
|
||||
use crate::token::{self, Nonterminal};
|
||||
use crate::tokenstream::{self, TokenStream, TokenTree};
|
||||
use crate::print::pprust;
|
||||
use crate::sess::ParseSess;
|
||||
#![feature(crate_visibility_modifier)]
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::print::pprust;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::token::{self, Nonterminal};
|
||||
use syntax::tokenstream::{self, TokenStream, TokenTree};
|
||||
|
||||
use errors::{PResult, FatalError, Level, Diagnostic};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
@ -17,12 +18,14 @@ use std::str;
|
||||
|
||||
use log::info;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
|
||||
|
||||
#[macro_use]
|
||||
pub mod parser;
|
||||
use parser::{Parser, emit_unclosed_delims, make_unclosed_delims_error};
|
||||
pub mod lexer;
|
||||
pub mod validate_attr;
|
||||
pub mod error_codes;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Directory<'a> {
|
@ -1,12 +1,10 @@
|
||||
use super::{SeqSep, Parser, TokenType, PathStyle};
|
||||
use crate::attr;
|
||||
use crate::ast;
|
||||
use crate::util::comments;
|
||||
use crate::token::{self, Nonterminal, DelimToken};
|
||||
use crate::tokenstream::{TokenStream, TokenTree};
|
||||
use crate::source_map::Span;
|
||||
|
||||
use syntax_pos::Symbol;
|
||||
use syntax::attr;
|
||||
use syntax::ast;
|
||||
use syntax::util::comments;
|
||||
use syntax::token::{self, Nonterminal, DelimToken};
|
||||
use syntax::tokenstream::{TokenStream, TokenTree};
|
||||
use syntax_pos::{Span, Symbol};
|
||||
use errors::PResult;
|
||||
|
||||
use log::debug;
|
||||
@ -158,12 +156,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
};
|
||||
|
||||
Ok(ast::Attribute {
|
||||
kind: ast::AttrKind::Normal(item),
|
||||
id: attr::mk_attr_id(),
|
||||
style,
|
||||
span,
|
||||
})
|
||||
Ok(attr::mk_attr_from_item(style, item, span))
|
||||
}
|
||||
|
||||
/// Parses an inner part of an attribute (the path and following tokens).
|
||||
@ -268,7 +261,7 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
|
||||
/// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited.
|
||||
crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
|
||||
pub fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> {
|
||||
self.expect(&token::OpenDelim(token::Paren))?;
|
||||
|
||||
let cfg_predicate = self.parse_meta_item()?;
|
@ -1,14 +1,16 @@
|
||||
use super::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, SeqSep, Parser};
|
||||
use crate::ast::{
|
||||
|
||||
use syntax::ast::{
|
||||
self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind,
|
||||
Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind,
|
||||
};
|
||||
use crate::token::{self, TokenKind, token_can_begin_expr};
|
||||
use crate::print::pprust;
|
||||
use crate::ptr::P;
|
||||
use crate::symbol::{kw, sym};
|
||||
use crate::ThinVec;
|
||||
use crate::util::parser::AssocOp;
|
||||
use syntax::token::{self, TokenKind, token_can_begin_expr};
|
||||
use syntax::print::pprust;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{kw, sym};
|
||||
use syntax::ThinVec;
|
||||
use syntax::util::parser::AssocOp;
|
||||
use syntax::struct_span_err;
|
||||
|
||||
use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, pluralize};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
@ -2,24 +2,23 @@ use super::{Parser, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode
|
||||
use super::{SemiColonMode, SeqSep, TokenExpectType};
|
||||
use super::pat::{GateOr, PARAM_EXPECTED};
|
||||
use super::diagnostics::Error;
|
||||
use crate::maybe_recover_from_interpolated_ty_qpath;
|
||||
|
||||
use crate::ast::{
|
||||
use syntax::ast::{
|
||||
self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode,
|
||||
Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind,
|
||||
FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit,
|
||||
};
|
||||
use crate::maybe_recover_from_interpolated_ty_qpath;
|
||||
use crate::token::{self, Token, TokenKind};
|
||||
use crate::print::pprust;
|
||||
use crate::ptr::P;
|
||||
use crate::source_map::{self, Span};
|
||||
use crate::util::classify;
|
||||
use crate::util::literal::LitError;
|
||||
use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
|
||||
|
||||
use errors::{PResult, Applicability};
|
||||
use syntax::token::{self, Token, TokenKind};
|
||||
use syntax::print::pprust;
|
||||
use syntax::ptr::P;
|
||||
use syntax::source_map::{self, Span};
|
||||
use syntax::util::classify;
|
||||
use syntax::util::literal::LitError;
|
||||
use syntax::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par};
|
||||
use syntax_pos::symbol::{kw, sym};
|
||||
use syntax_pos::Symbol;
|
||||
use errors::{PResult, Applicability};
|
||||
use std::mem;
|
||||
use rustc_data_structures::thin_vec::ThinVec;
|
||||
|
@ -1,9 +1,8 @@
|
||||
use super::Parser;
|
||||
|
||||
use crate::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute};
|
||||
use crate::token;
|
||||
use crate::source_map::DUMMY_SP;
|
||||
|
||||
use syntax::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute};
|
||||
use syntax::token;
|
||||
use syntax::source_map::DUMMY_SP;
|
||||
use syntax_pos::symbol::{kw, sym};
|
||||
|
||||
use errors::PResult;
|
@ -2,23 +2,24 @@ use super::{Parser, PathStyle};
|
||||
use super::diagnostics::{Error, dummy_arg, ConsumeClosingDelim};
|
||||
|
||||
use crate::maybe_whole;
|
||||
use crate::ptr::P;
|
||||
use crate::ast::{self, Abi, DUMMY_NODE_ID, Ident, Attribute, AttrKind, AttrStyle, AnonConst, Item};
|
||||
use crate::ast::{ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind};
|
||||
use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness};
|
||||
use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
|
||||
use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField};
|
||||
use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param};
|
||||
use crate::parse::token;
|
||||
use crate::tokenstream::{TokenTree, TokenStream};
|
||||
use crate::symbol::{kw, sym};
|
||||
use crate::source_map::{self, respan, Span};
|
||||
use crate::ThinVec;
|
||||
|
||||
use syntax::ast::{self, Abi, DUMMY_NODE_ID, Ident, Attribute, AttrKind, AttrStyle, AnonConst, Item};
|
||||
use syntax::ast::{ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind};
|
||||
use syntax::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness};
|
||||
use syntax::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind};
|
||||
use syntax::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField};
|
||||
use syntax::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param};
|
||||
use syntax::ptr::P;
|
||||
use syntax::ThinVec;
|
||||
use syntax::token;
|
||||
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||
use syntax::source_map::{self, respan, Span};
|
||||
use syntax_pos::BytePos;
|
||||
use syntax_pos::symbol::{kw, sym};
|
||||
|
||||
use log::debug;
|
||||
use std::mem;
|
||||
use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, StashKey};
|
||||
use syntax_pos::BytePos;
|
||||
|
||||
/// Whether the type alias or associated type is a concrete type or an opaque type.
|
||||
#[derive(Debug)]
|
@ -11,24 +11,26 @@ mod generics;
|
||||
mod diagnostics;
|
||||
use diagnostics::Error;
|
||||
|
||||
use crate::ast::{
|
||||
use crate::{Directory, DirectoryOwnership};
|
||||
use crate::lexer::UnmatchedBrace;
|
||||
|
||||
use syntax::ast::{
|
||||
self, Abi, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident,
|
||||
IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety,
|
||||
};
|
||||
use crate::parse::{Directory, DirectoryOwnership};
|
||||
use crate::parse::lexer::UnmatchedBrace;
|
||||
use crate::util::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||
use crate::token::{self, Token, TokenKind, DelimToken};
|
||||
use crate::print::pprust;
|
||||
use crate::ptr::P;
|
||||
use crate::sess::ParseSess;
|
||||
use crate::source_map::respan;
|
||||
use crate::symbol::{kw, sym, Symbol};
|
||||
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
|
||||
use crate::ThinVec;
|
||||
|
||||
use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
|
||||
use syntax::print::pprust;
|
||||
use syntax::ptr::P;
|
||||
use syntax::token::{self, Token, TokenKind, DelimToken};
|
||||
use syntax::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::source_map::respan;
|
||||
use syntax::struct_span_err;
|
||||
use syntax::util::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||
use syntax_pos::symbol::{kw, sym, Symbol};
|
||||
use syntax_pos::{Span, BytePos, DUMMY_SP, FileName};
|
||||
use rustc_data_structures::thin_vec::ThinVec;
|
||||
use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
|
||||
use log::debug;
|
||||
|
||||
use std::borrow::Cow;
|
@ -2,13 +2,14 @@ use super::Parser;
|
||||
use super::item::ItemInfo;
|
||||
use super::diagnostics::Error;
|
||||
|
||||
use crate::attr;
|
||||
use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
|
||||
use crate::parse::{new_sub_parser_from_file, DirectoryOwnership};
|
||||
use crate::token::{self, TokenKind};
|
||||
use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName};
|
||||
use crate::symbol::sym;
|
||||
use crate::{new_sub_parser_from_file, DirectoryOwnership};
|
||||
|
||||
use syntax::attr;
|
||||
use syntax::ast::{self, Ident, Attribute, ItemKind, Mod, Crate};
|
||||
use syntax::token::{self, TokenKind};
|
||||
use syntax::source_map::{SourceMap, Span, DUMMY_SP, FileName};
|
||||
|
||||
use syntax_pos::symbol::sym;
|
||||
use errors::PResult;
|
||||
|
||||
use std::path::{self, Path, PathBuf};
|
||||
@ -39,17 +40,12 @@ impl<'a> Parser<'a> {
|
||||
|
||||
/// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
|
||||
pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
|
||||
let (in_cfg, outer_attrs) = {
|
||||
// FIXME(Centril): This results in a cycle between config and parsing.
|
||||
// Consider using dynamic dispatch via `self.sess` to disentangle the knot.
|
||||
let mut strip_unconfigured = crate::config::StripUnconfigured {
|
||||
sess: self.sess,
|
||||
features: None, // Don't perform gated feature checking.
|
||||
};
|
||||
let mut outer_attrs = outer_attrs.to_owned();
|
||||
strip_unconfigured.process_cfg_attrs(&mut outer_attrs);
|
||||
(!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs)
|
||||
};
|
||||
// HACK(Centril): See documentation on `ParseSess::process_cfg_mod`.
|
||||
let (in_cfg, outer_attrs) = (self.sess.process_cfg_mod)(
|
||||
self.sess,
|
||||
self.cfg_mods,
|
||||
outer_attrs,
|
||||
);
|
||||
|
||||
let id_span = self.token.span;
|
||||
let id = self.parse_ident()?;
|
@ -1,14 +1,13 @@
|
||||
use super::{Parser, PathStyle};
|
||||
|
||||
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
|
||||
use crate::ptr::P;
|
||||
use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
|
||||
use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
|
||||
use crate::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor};
|
||||
use crate::token;
|
||||
use crate::print::pprust;
|
||||
use crate::source_map::{respan, Span, Spanned};
|
||||
use crate::ThinVec;
|
||||
use syntax::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac};
|
||||
use syntax::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind};
|
||||
use syntax::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor};
|
||||
use syntax::ptr::P;
|
||||
use syntax::print::pprust;
|
||||
use syntax::ThinVec;
|
||||
use syntax::token;
|
||||
use syntax::source_map::{respan, Span, Spanned};
|
||||
use syntax_pos::symbol::{kw, sym};
|
||||
use errors::{PResult, Applicability, DiagnosticBuilder};
|
||||
|
@ -1,10 +1,10 @@
|
||||
use super::{Parser, TokenType};
|
||||
|
||||
use crate::{maybe_whole, ThinVec};
|
||||
use crate::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs};
|
||||
use crate::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
|
||||
use crate::token::{self, Token};
|
||||
use crate::source_map::{Span, BytePos};
|
||||
use crate::maybe_whole;
|
||||
use syntax::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs};
|
||||
use syntax::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
|
||||
use syntax::ThinVec;
|
||||
use syntax::token::{self, Token};
|
||||
use syntax::source_map::{Span, BytePos};
|
||||
use syntax_pos::symbol::{kw, sym};
|
||||
|
||||
use std::mem;
|
@ -3,16 +3,18 @@ use super::expr::LhsExpr;
|
||||
use super::path::PathStyle;
|
||||
use super::pat::GateOr;
|
||||
use super::diagnostics::Error;
|
||||
use crate::maybe_whole;
|
||||
use crate::DirectoryOwnership;
|
||||
|
||||
use crate::ptr::P;
|
||||
use crate::{maybe_whole, ThinVec};
|
||||
use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
|
||||
use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
|
||||
use crate::parse::DirectoryOwnership;
|
||||
use crate::util::classify;
|
||||
use crate::token;
|
||||
use crate::source_map::{respan, Span};
|
||||
use crate::symbol::{kw, sym};
|
||||
use syntax::ThinVec;
|
||||
use syntax::ptr::P;
|
||||
use syntax::ast;
|
||||
use syntax::ast::{DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind};
|
||||
use syntax::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter};
|
||||
use syntax::util::classify;
|
||||
use syntax::token;
|
||||
use syntax::source_map::{respan, Span};
|
||||
use syntax::symbol::{kw, sym};
|
||||
|
||||
use std::mem;
|
||||
use errors::{PResult, Applicability};
|
@ -2,13 +2,15 @@ use super::{Parser, PathStyle, PrevTokenKind, TokenType};
|
||||
use super::item::ParamCfg;
|
||||
|
||||
use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath};
|
||||
use crate::ptr::P;
|
||||
use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
|
||||
use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
|
||||
use crate::ast::{Mutability, AnonConst, Mac};
|
||||
use crate::token::{self, Token};
|
||||
use crate::source_map::Span;
|
||||
use crate::symbol::{kw};
|
||||
|
||||
use syntax::ptr::P;
|
||||
use syntax::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident};
|
||||
use syntax::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef};
|
||||
use syntax::ast::{Mutability, AnonConst, Mac};
|
||||
use syntax::token::{self, Token};
|
||||
use syntax::source_map::Span;
|
||||
use syntax::struct_span_fatal;
|
||||
use syntax_pos::symbol::kw;
|
||||
|
||||
use errors::{PResult, Applicability, pluralize};
|
||||
|
111
src/librustc_parse/validate_attr.rs
Normal file
111
src/librustc_parse/validate_attr.rs
Normal file
@ -0,0 +1,111 @@
|
||||
//! Meta-syntax validation logic of attributes for post-expansion.
|
||||
|
||||
use errors::{PResult, Applicability};
|
||||
use syntax::ast::{self, Attribute, AttrKind, Ident, MetaItem};
|
||||
use syntax::attr::{AttributeTemplate, mk_name_value_item_str};
|
||||
use syntax::early_buffered_lints::BufferedEarlyLintId;
|
||||
use syntax::feature_gate::BUILTIN_ATTRIBUTE_MAP;
|
||||
use syntax::token;
|
||||
use syntax::tokenstream::TokenTree;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax_pos::{Symbol, sym};
|
||||
|
||||
pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
|
||||
let attr_info =
|
||||
attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)).map(|a| **a);
|
||||
|
||||
// Check input tokens for built-in and key-value attributes.
|
||||
match attr_info {
|
||||
// `rustc_dummy` doesn't have any restrictions specific to built-in attributes.
|
||||
Some((name, _, template, _)) if name != sym::rustc_dummy =>
|
||||
check_builtin_attribute(sess, attr, name, template),
|
||||
_ => if let Some(TokenTree::Token(token)) = attr.get_normal_item().tokens.trees().next() {
|
||||
if token == token::Eq {
|
||||
// All key-value attributes are restricted to meta-item syntax.
|
||||
parse_meta(sess, attr).map_err(|mut err| err.emit()).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, MetaItem> {
|
||||
Ok(match attr.kind {
|
||||
AttrKind::Normal(ref item) => MetaItem {
|
||||
path: item.path.clone(),
|
||||
kind: super::parse_in_attr(sess, attr, |p| p.parse_meta_item_kind())?,
|
||||
span: attr.span,
|
||||
},
|
||||
AttrKind::DocComment(comment) => {
|
||||
mk_name_value_item_str(Ident::new(sym::doc, attr.span), comment, attr.span)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn check_builtin_attribute(
|
||||
sess: &ParseSess,
|
||||
attr: &Attribute,
|
||||
name: Symbol,
|
||||
template: AttributeTemplate,
|
||||
) {
|
||||
// Some special attributes like `cfg` must be checked
|
||||
// before the generic check, so we skip them here.
|
||||
let should_skip = |name| name == sym::cfg;
|
||||
// Some of previously accepted forms were used in practice,
|
||||
// report them as warnings for now.
|
||||
let should_warn = |name| name == sym::doc || name == sym::ignore ||
|
||||
name == sym::inline || name == sym::link ||
|
||||
name == sym::test || name == sym::bench;
|
||||
|
||||
match parse_meta(sess, attr) {
|
||||
Ok(meta) => if !should_skip(name) && !template.compatible(&meta.kind) {
|
||||
let error_msg = format!("malformed `{}` attribute input", name);
|
||||
let mut msg = "attribute must be of the form ".to_owned();
|
||||
let mut suggestions = vec![];
|
||||
let mut first = true;
|
||||
if template.word {
|
||||
first = false;
|
||||
let code = format!("#[{}]", name);
|
||||
msg.push_str(&format!("`{}`", &code));
|
||||
suggestions.push(code);
|
||||
}
|
||||
if let Some(descr) = template.list {
|
||||
if !first {
|
||||
msg.push_str(" or ");
|
||||
}
|
||||
first = false;
|
||||
let code = format!("#[{}({})]", name, descr);
|
||||
msg.push_str(&format!("`{}`", &code));
|
||||
suggestions.push(code);
|
||||
}
|
||||
if let Some(descr) = template.name_value_str {
|
||||
if !first {
|
||||
msg.push_str(" or ");
|
||||
}
|
||||
let code = format!("#[{} = \"{}\"]", name, descr);
|
||||
msg.push_str(&format!("`{}`", &code));
|
||||
suggestions.push(code);
|
||||
}
|
||||
if should_warn(name) {
|
||||
sess.buffer_lint(
|
||||
BufferedEarlyLintId::IllFormedAttributeInput,
|
||||
meta.span,
|
||||
ast::CRATE_NODE_ID,
|
||||
&msg,
|
||||
);
|
||||
} else {
|
||||
sess.span_diagnostic.struct_span_err(meta.span, &error_msg)
|
||||
.span_suggestions(
|
||||
meta.span,
|
||||
if suggestions.len() == 1 {
|
||||
"must be of the form"
|
||||
} else {
|
||||
"the following are the possible correct uses"
|
||||
},
|
||||
suggestions.into_iter(),
|
||||
Applicability::HasPlaceholders,
|
||||
).emit();
|
||||
}
|
||||
}
|
||||
Err(mut err) => err.emit(),
|
||||
}
|
||||
}
|
@ -12,8 +12,9 @@ path = "lib.rs"
|
||||
log = "0.4"
|
||||
rustc = { path = "../librustc" }
|
||||
rustc_data_structures = { path = "../librustc_data_structures" }
|
||||
rustc_index = { path = "../librustc_index" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
rustc_target = { path = "../librustc_target" }
|
||||
syntax = { path = "../libsyntax" }
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
errors = { path = "../librustc_errors", package = "rustc_errors" }
|
||||
rustc_target = { path = "../librustc_target" }
|
||||
rustc_index = { path = "../librustc_index" }
|
||||
|
@ -7,14 +7,15 @@
|
||||
// or type checking or some other kind of complex analysis.
|
||||
|
||||
use std::mem;
|
||||
use syntax::print::pprust;
|
||||
use rustc::lint;
|
||||
use rustc::session::Session;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_parse::validate_attr;
|
||||
use syntax::ast::*;
|
||||
use syntax::attr;
|
||||
use syntax::expand::is_proc_macro_attr;
|
||||
use syntax::feature_gate::is_builtin_attr;
|
||||
use syntax::print::pprust;
|
||||
use syntax::source_map::Spanned;
|
||||
use syntax::symbol::{kw, sym};
|
||||
use syntax::visit::{self, Visitor};
|
||||
@ -369,6 +370,10 @@ fn validate_generics_order<'a>(
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for AstValidator<'a> {
|
||||
fn visit_attribute(&mut self, attr: &Attribute) {
|
||||
validate_attr::check_meta(&self.session.parse_sess, attr);
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'a Expr) {
|
||||
match &expr.kind {
|
||||
ExprKind::Closure(_, _, _, fn_decl, _, _) => {
|
||||
|
@ -13,6 +13,7 @@ log = "0.4"
|
||||
rustc = { path = "../librustc" }
|
||||
rustc_data_structures = { path = "../librustc_data_structures" }
|
||||
rustc_codegen_utils = { path = "../librustc_codegen_utils" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
serde_json = "1"
|
||||
syntax = { path = "../libsyntax" }
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
|
@ -1,8 +1,6 @@
|
||||
use rustc::session::Session;
|
||||
|
||||
use crate::generated_code;
|
||||
|
||||
use syntax::parse::lexer::{self, StringReader};
|
||||
use rustc::session::Session;
|
||||
use rustc_parse::lexer::{self, StringReader};
|
||||
use syntax::token::{self, TokenKind};
|
||||
use syntax_pos::*;
|
||||
|
||||
|
@ -11,11 +11,12 @@ use std::fmt::Display;
|
||||
use std::io;
|
||||
use std::io::prelude::*;
|
||||
|
||||
use syntax::source_map::SourceMap;
|
||||
use syntax::parse::lexer;
|
||||
use rustc_parse::lexer;
|
||||
use syntax::token::{self, Token};
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::source_map::SourceMap;
|
||||
use syntax::symbol::{kw, sym};
|
||||
use syntax_expand::config::process_configure_mod;
|
||||
use syntax_pos::{Span, FileName};
|
||||
|
||||
/// Highlights `src`, returning the HTML output.
|
||||
@ -33,7 +34,7 @@ pub fn render_with_highlighting(
|
||||
class, tooltip).unwrap();
|
||||
}
|
||||
|
||||
let sess = ParseSess::with_silent_emitter();
|
||||
let sess = ParseSess::with_silent_emitter(process_configure_mod);
|
||||
let fm = sess.source_map().new_source_file(
|
||||
FileName::Custom(String::from("rustdoc-highlighting")),
|
||||
src.to_owned(),
|
||||
|
@ -29,6 +29,7 @@ extern crate rustc_resolve;
|
||||
extern crate rustc_lint;
|
||||
extern crate rustc_interface;
|
||||
extern crate rustc_metadata;
|
||||
extern crate rustc_parse;
|
||||
extern crate rustc_target;
|
||||
extern crate rustc_typeck;
|
||||
extern crate rustc_lexer;
|
||||
|
@ -1,8 +1,9 @@
|
||||
use errors::Applicability;
|
||||
use syntax::parse::lexer::{StringReader as Lexer};
|
||||
use rustc_parse::lexer::{StringReader as Lexer};
|
||||
use syntax::token;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::source_map::FilePathMapping;
|
||||
use syntax_expand::config::process_configure_mod;
|
||||
use syntax_pos::{InnerSpan, FileName};
|
||||
|
||||
use crate::clean;
|
||||
@ -27,7 +28,7 @@ struct SyntaxChecker<'a, 'tcx> {
|
||||
|
||||
impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
|
||||
fn check_rust_syntax(&self, item: &clean::Item, dox: &str, code_block: RustCodeBlock) {
|
||||
let sess = ParseSess::new(FilePathMapping::empty());
|
||||
let sess = ParseSess::new(FilePathMapping::empty(), process_configure_mod);
|
||||
let source_file = sess.source_map().new_source_file(
|
||||
FileName::Custom(String::from("doctest")),
|
||||
dox[code_block.code].to_owned(),
|
||||
|
@ -17,6 +17,7 @@ use std::path::PathBuf;
|
||||
use std::process::{self, Command, Stdio};
|
||||
use std::str;
|
||||
use syntax::symbol::sym;
|
||||
use syntax_expand::config::process_configure_mod;
|
||||
use syntax_pos::{BytePos, DUMMY_SP, Pos, Span, FileName};
|
||||
use tempfile::Builder as TempFileBuilder;
|
||||
use testing;
|
||||
@ -398,7 +399,8 @@ pub fn make_test(s: &str,
|
||||
// Uses libsyntax to parse the doctest and find if there's a main fn and the extern
|
||||
// crate already is included.
|
||||
let (already_has_main, already_has_extern_crate, found_macro) = with_globals(edition, || {
|
||||
use crate::syntax::{parse, sess::ParseSess, source_map::FilePathMapping};
|
||||
use crate::syntax::{sess::ParseSess, source_map::FilePathMapping};
|
||||
use rustc_parse::maybe_new_parser_from_source_str;
|
||||
use errors::emitter::EmitterWriter;
|
||||
use errors::Handler;
|
||||
|
||||
@ -411,13 +413,13 @@ pub fn make_test(s: &str,
|
||||
let emitter = EmitterWriter::new(box io::sink(), None, false, false, false, None, false);
|
||||
// FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser
|
||||
let handler = Handler::with_emitter(false, None, box emitter);
|
||||
let sess = ParseSess::with_span_handler(handler, cm);
|
||||
let sess = ParseSess::with_span_handler(handler, cm, process_configure_mod);
|
||||
|
||||
let mut found_main = false;
|
||||
let mut found_extern_crate = cratename.is_none();
|
||||
let mut found_macro = false;
|
||||
|
||||
let mut parser = match parse::maybe_new_parser_from_source_str(&sess, filename, source) {
|
||||
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, source) {
|
||||
Ok(p) => p,
|
||||
Err(errs) => {
|
||||
for mut err in errs {
|
||||
|
@ -537,7 +537,7 @@ pub struct Pat {
|
||||
impl Pat {
|
||||
/// Attempt reparsing the pattern as a type.
|
||||
/// This is intended for use by diagnostics.
|
||||
pub(super) fn to_ty(&self) -> Option<P<Ty>> {
|
||||
pub fn to_ty(&self) -> Option<P<Ty>> {
|
||||
let kind = match &self.kind {
|
||||
// In a type expression `_` is an inference variable.
|
||||
PatKind::Wild => TyKind::Infer,
|
||||
@ -1031,7 +1031,7 @@ impl Expr {
|
||||
}
|
||||
}
|
||||
|
||||
fn to_bound(&self) -> Option<GenericBound> {
|
||||
pub fn to_bound(&self) -> Option<GenericBound> {
|
||||
match &self.kind {
|
||||
ExprKind::Path(None, path) => Some(GenericBound::Trait(
|
||||
PolyTraitRef::new(Vec::new(), path.clone(), self.span),
|
||||
@ -1042,7 +1042,7 @@ impl Expr {
|
||||
}
|
||||
|
||||
/// Attempts to reparse as `Ty` (for diagnostic purposes).
|
||||
pub(super) fn to_ty(&self) -> Option<P<Ty>> {
|
||||
pub fn to_ty(&self) -> Option<P<Ty>> {
|
||||
let kind = match &self.kind {
|
||||
// Trivial conversions.
|
||||
ExprKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()),
|
||||
|
@@ -1,7 +1,6 @@
//! Parsing and validation of builtin attributes

use crate::ast::{self, Attribute, MetaItem, NestedMetaItem};
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::feature_gate::{Features, GatedCfg};
use crate::print::pprust;
use crate::sess::ParseSess;

@@ -25,9 +24,9 @@ enum AttrError {
/// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now.
#[derive(Clone, Copy)]
pub struct AttributeTemplate {
crate word: bool,
crate list: Option<&'static str>,
crate name_value_str: Option<&'static str>,
pub word: bool,
pub list: Option<&'static str>,
pub name_value_str: Option<&'static str>,
}

impl AttributeTemplate {

@@ -36,7 +35,7 @@ impl AttributeTemplate {
}

/// Checks that the given meta-item is compatible with this template.
fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool {
pub fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool {
match meta_item_kind {
ast::MetaItemKind::Word => self.word,
ast::MetaItemKind::List(..) => self.list.is_some(),
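With the `AttributeTemplate` fields now `pub`, code outside libsyntax (notably the new `rustc_parse::validate_attr`) can build templates directly. A minimal sketch of the intended shape, assuming `syntax::ast` and `syntax::attr` are in scope (illustrative only, not part of this diff):

```rust
use syntax::ast;
use syntax::attr::AttributeTemplate;

fn demo() {
    // A hypothetical template accepting `#[attr]` or `#[attr = "reason"]`,
    // but not `#[attr(...)]`.
    let template = AttributeTemplate { word: true, list: None, name_value_str: Some("reason") };
    assert!(template.compatible(&ast::MetaItemKind::Word));
    assert!(!template.compatible(&ast::MetaItemKind::List(Vec::new())));
}
```
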
@ -938,69 +937,3 @@ pub fn find_transparency(
|
||||
let fallback = if is_legacy { Transparency::SemiTransparent } else { Transparency::Opaque };
|
||||
(transparency.map_or(fallback, |t| t.0), error)
|
||||
}
|
||||
|
||||
pub fn check_builtin_attribute(
|
||||
sess: &ParseSess, attr: &ast::Attribute, name: Symbol, template: AttributeTemplate
|
||||
) {
|
||||
// Some special attributes like `cfg` must be checked
|
||||
// before the generic check, so we skip them here.
|
||||
let should_skip = |name| name == sym::cfg;
|
||||
// Some previously accepted forms were used in practice,
// so report them as warnings for now.
|
||||
let should_warn = |name| name == sym::doc || name == sym::ignore ||
|
||||
name == sym::inline || name == sym::link ||
|
||||
name == sym::test || name == sym::bench;
|
||||
|
||||
match attr.parse_meta(sess) {
|
||||
Ok(meta) => if !should_skip(name) && !template.compatible(&meta.kind) {
|
||||
let error_msg = format!("malformed `{}` attribute input", name);
|
||||
let mut msg = "attribute must be of the form ".to_owned();
|
||||
let mut suggestions = vec![];
|
||||
let mut first = true;
|
||||
if template.word {
|
||||
first = false;
|
||||
let code = format!("#[{}]", name);
|
||||
msg.push_str(&format!("`{}`", &code));
|
||||
suggestions.push(code);
|
||||
}
|
||||
if let Some(descr) = template.list {
|
||||
if !first {
|
||||
msg.push_str(" or ");
|
||||
}
|
||||
first = false;
|
||||
let code = format!("#[{}({})]", name, descr);
|
||||
msg.push_str(&format!("`{}`", &code));
|
||||
suggestions.push(code);
|
||||
}
|
||||
if let Some(descr) = template.name_value_str {
|
||||
if !first {
|
||||
msg.push_str(" or ");
|
||||
}
|
||||
let code = format!("#[{} = \"{}\"]", name, descr);
|
||||
msg.push_str(&format!("`{}`", &code));
|
||||
suggestions.push(code);
|
||||
}
|
||||
if should_warn(name) {
|
||||
sess.buffer_lint(
|
||||
BufferedEarlyLintId::IllFormedAttributeInput,
|
||||
meta.span,
|
||||
ast::CRATE_NODE_ID,
|
||||
&msg,
|
||||
);
|
||||
} else {
|
||||
sess.span_diagnostic.struct_span_err(meta.span, &error_msg)
|
||||
.span_suggestions(
|
||||
meta.span,
|
||||
if suggestions.len() == 1 {
|
||||
"must be of the form"
|
||||
} else {
|
||||
"the following are the possible correct uses"
|
||||
},
|
||||
suggestions.into_iter(),
|
||||
Applicability::HasPlaceholders,
|
||||
).emit();
|
||||
}
|
||||
}
|
||||
Err(mut err) => err.emit(),
|
||||
}
|
||||
}
|
||||
|
@ -14,17 +14,13 @@ use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem};
|
||||
use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam};
|
||||
use crate::mut_visit::visit_clobber;
|
||||
use crate::source_map::{BytePos, Spanned};
|
||||
use crate::parse;
|
||||
use crate::token::{self, Token};
|
||||
use crate::ptr::P;
|
||||
use crate::sess::ParseSess;
|
||||
use crate::symbol::{sym, Symbol};
|
||||
use crate::ThinVec;
|
||||
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
|
||||
use crate::GLOBALS;
|
||||
|
||||
use errors::PResult;
|
||||
|
||||
use log::debug;
|
||||
use syntax_pos::Span;
|
||||
|
||||
@ -281,7 +277,7 @@ impl MetaItem {
|
||||
}
|
||||
|
||||
impl AttrItem {
|
||||
crate fn meta(&self, span: Span) -> Option<MetaItem> {
|
||||
pub fn meta(&self, span: Span) -> Option<MetaItem> {
|
||||
let mut tokens = self.tokens.trees().peekable();
|
||||
Some(MetaItem {
|
||||
path: self.path.clone(),
|
||||
@ -328,21 +324,6 @@ impl Attribute {
|
||||
Some(mk_name_value_item_str(Ident::new(sym::doc, self.span), comment, self.span)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> {
|
||||
match self.kind {
|
||||
AttrKind::Normal(ref item) => {
|
||||
Ok(MetaItem {
|
||||
path: item.path.clone(),
|
||||
kind: parse::parse_in_attr(sess, self, |parser| parser.parse_meta_item_kind())?,
|
||||
span: self.span,
|
||||
})
|
||||
}
|
||||
AttrKind::DocComment(comment) => {
|
||||
Ok(mk_name_value_item_str(Ident::new(sym::doc, self.span), comment, self.span))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Constructors */
|
||||
@ -382,8 +363,12 @@ crate fn mk_attr_id() -> AttrId {
|
||||
}
|
||||
|
||||
pub fn mk_attr(style: AttrStyle, path: Path, tokens: TokenStream, span: Span) -> Attribute {
|
||||
mk_attr_from_item(style, AttrItem { path, tokens }, span)
|
||||
}
|
||||
|
||||
pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute {
|
||||
Attribute {
|
||||
kind: AttrKind::Normal(AttrItem { path, tokens }),
|
||||
kind: AttrKind::Normal(item),
|
||||
id: mk_attr_id(),
|
||||
style,
|
||||
span,
|
||||
|
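The new `mk_attr_from_item` constructor is what `cfg_attr` expansion switches to further down in this commit (in the `config.rs` hunk); roughly:

```rust
// Rebuild a full attribute from an already-parsed `AttrItem`, keeping the
// original attribute's style and span.
let new_attr = attr::mk_attr_from_item(attr.style, item, span);
```
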
@@ -4,28 +4,6 @@
// `:set tw=0` to disable.
register_diagnostics! {

E0178: r##"
In types, the `+` type operator has low precedence, so it is often necessary
to use parentheses.

For example:

```compile_fail,E0178
trait Foo {}

struct Bar<'a> {
w: &'a Foo + Copy, // error, use &'a (Foo + Copy)
x: &'a Foo + 'a, // error, use &'a (Foo + 'a)
y: &'a mut Foo + 'a, // error, use &'a mut (Foo + 'a)
z: fn() -> Foo + 'a, // error, use fn() -> (Foo + 'a)
}
```

More details can be found in [RFC 438].

[RFC 438]: https://github.com/rust-lang/rfcs/pull/438
"##,
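For comparison, the `x` field parses once the parentheses are added as its comment suggests; a minimal sketch (written with `dyn` for clarity, and not part of this diff):

```rust
trait Foo {}

struct Bar<'a> {
    // The parentheses make `+` apply to the trait object, not to `&'a Foo`.
    x: &'a (dyn Foo + 'a),
}

fn main() {}
```
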

E0536: r##"
The `not` cfg-predicate was malformed.

@@ -278,106 +256,6 @@ pub fn something() {}
```
"##,

E0583: r##"
A file wasn't found for an out-of-line module.

Erroneous code example:

```ignore (compile_fail not working here; see Issue #43707)
mod file_that_doesnt_exist; // error: file not found for module

fn main() {}
```

Please be sure that a file corresponding to the module exists. If you
want to use a module named `file_that_doesnt_exist`, you need to have a file
named `file_that_doesnt_exist.rs` or `file_that_doesnt_exist/mod.rs` in the
same directory.
"##,

E0584: r##"
A doc comment that is not attached to anything has been encountered.

Erroneous code example:

```compile_fail,E0584
trait Island {
fn lost();

/// I'm lost!
}
```

A little reminder: a doc comment has to be placed before the item it's supposed
to document. So if you want to document the `Island` trait, you need to put a
doc comment before it, not inside it. Same goes for the `lost` method: the doc
comment needs to be before it:

```
/// I'm THE island!
trait Island {
/// I'm lost!
fn lost();
}
```
"##,

E0585: r##"
A documentation comment that doesn't document anything was found.

Erroneous code example:

```compile_fail,E0585
fn main() {
// The following doc comment will fail:
/// This is a useless doc comment!
}
```

Documentation comments need to be followed by items, including functions,
types, modules, etc. Examples:

```
/// I'm documenting the following struct:
struct Foo;

/// I'm documenting the following function:
fn foo() {}
```
"##,

E0586: r##"
An inclusive range was used with no end.

Erroneous code example:

```compile_fail,E0586
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..=]; // error: inclusive range was used with no end
}
```

An inclusive range needs an end in order to *include* it. If you just need a
start and no end, use a non-inclusive range (with `..`):

```
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..]; // ok!
}
```

Or put an end to your inclusive range:

```
fn main() {
let tmp = vec![0, 1, 2, 3, 4, 4, 3, 3, 2, 1];
let x = &tmp[1..=3]; // ok!
}
```
"##,

E0589: r##"
The value of `N` that was specified for `repr(align(N))` was not a power
of two, or was greater than 2^29.
@@ -446,35 +324,6 @@ and likely to change in the future.

"##,

E0704: r##"
This error indicates that an incorrect visibility restriction was specified.

Example of erroneous code:

```compile_fail,E0704
mod foo {
pub(foo) struct Bar {
x: i32
}
}
```

To make struct `Bar` only visible in module `foo`, the `in` keyword should be
used:
```
mod foo {
pub(in crate::foo) struct Bar {
x: i32
}
}
# fn main() {}
```

For more information see the Rust Reference on [Visibility].

[Visibility]: https://doc.rust-lang.org/reference/visibility-and-privacy.html
"##,

E0705: r##"
A `#![feature]` attribute was declared for a feature that is stable in
the current edition, but not in all editions.

@@ -504,20 +353,6 @@ Delete the offending feature attribute, or add it to the list of allowed
features in the `-Z allow_features` flag.
"##,

E0743: r##"
C-variadic has been used on a non-foreign function.

Erroneous code example:

```compile_fail,E0743
fn foo2(x: u8, ...) {} // error!
```

Only foreign functions can use C-variadic (`...`). It is used to give a
variable number of parameters to a given function (like `printf` in C). The
equivalent in Rust would be to use macros directly.
"##,
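For contrast, a C-variadic parameter is accepted in a foreign function *declaration*; a small example of the accepted form (not part of this diff):

```rust
use std::os::raw::{c_char, c_int};

extern "C" {
    // Declaring a variadic foreign function does not trigger E0743;
    // only `...` on a non-foreign function does.
    fn printf(format: *const c_char, ...) -> c_int;
}

fn main() {}
```
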

;

E0539, // incorrect meta item

@ -3,18 +3,14 @@ use super::accepted::ACCEPTED_FEATURES;
|
||||
use super::removed::{REMOVED_FEATURES, STABLE_REMOVED_FEATURES};
|
||||
use super::builtin_attrs::{AttributeGate, BUILTIN_ATTRIBUTE_MAP};
|
||||
|
||||
use crate::ast::{
|
||||
self, AssocTyConstraint, AssocTyConstraintKind, NodeId, GenericParam, GenericParamKind,
|
||||
PatKind, RangeEnd, VariantData,
|
||||
};
|
||||
use crate::attr::{self, check_builtin_attribute};
|
||||
use crate::ast::{self, AssocTyConstraint, AssocTyConstraintKind, NodeId};
|
||||
use crate::ast::{GenericParam, GenericParamKind, PatKind, RangeEnd, VariantData};
|
||||
use crate::attr;
|
||||
use crate::source_map::Spanned;
|
||||
use crate::edition::{ALL_EDITIONS, Edition};
|
||||
use crate::visit::{self, FnKind, Visitor};
|
||||
use crate::token;
|
||||
use crate::sess::ParseSess;
|
||||
use crate::symbol::{Symbol, sym};
|
||||
use crate::tokenstream::TokenTree;
|
||||
|
||||
use errors::{Applicability, DiagnosticBuilder, Handler};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
@ -331,19 +327,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
|
||||
if let Some((.., AttributeGate::Gated(_, name, descr, has_feature))) = attr_info {
|
||||
gate_feature_fn!(self, has_feature, attr.span, name, descr, GateStrength::Hard);
|
||||
}
|
||||
// Check input tokens for built-in and key-value attributes.
|
||||
match attr_info {
|
||||
// `rustc_dummy` doesn't have any restrictions specific to built-in attributes.
|
||||
Some((name, _, template, _)) if name != sym::rustc_dummy =>
|
||||
check_builtin_attribute(self.parse_sess, attr, name, template),
|
||||
_ => if let Some(TokenTree::Token(token)) =
|
||||
attr.get_normal_item().tokens.trees().next() {
|
||||
if token == token::Eq {
|
||||
// All key-value attributes are restricted to meta-item syntax.
|
||||
attr.parse_meta(self.parse_sess).map_err(|mut err| err.emit()).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check unstable flavors of the `#[doc]` attribute.
|
||||
if attr.check_name(sym::doc) {
|
||||
for nested_meta in attr.meta_item_list().unwrap_or_default() {
|
||||
|
@ -2,7 +2,6 @@ use super::*;
|
||||
|
||||
use crate::json::JsonEmitter;
|
||||
use crate::source_map::{FilePathMapping, SourceMap};
|
||||
use crate::tests::Shared;
|
||||
use crate::with_default_globals;
|
||||
|
||||
use errors::emitter::{ColorConfig, HumanReadableErrorType};
|
||||
@ -27,6 +26,20 @@ struct SpanTestData {
|
||||
pub column_end: u32,
|
||||
}
|
||||
|
||||
struct Shared<T> {
|
||||
data: Arc<Mutex<T>>,
|
||||
}
|
||||
|
||||
impl<T: Write> Write for Shared<T> {
|
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||
self.data.lock().unwrap().write(buf)
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> io::Result<()> {
|
||||
self.data.lock().unwrap().flush()
|
||||
}
|
||||
}
|
||||
|
||||
/// Test the span yields correct positions in JSON.
|
||||
fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
|
||||
let expected_output = TestData { spans: vec![expected_output] };
|
||||
|
@ -26,11 +26,6 @@ pub use rustc_data_structures::thin_vec::ThinVec;
|
||||
use ast::AttrId;
|
||||
use syntax_pos::edition::Edition;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! unwrap_or {
|
||||
($opt:expr, $default:expr) => {
|
||||
@ -85,10 +80,10 @@ pub mod diagnostics {
|
||||
pub mod error_codes;
|
||||
|
||||
pub mod util {
|
||||
crate mod classify;
|
||||
pub mod classify;
|
||||
pub mod comments;
|
||||
pub mod lev_distance;
|
||||
crate mod literal;
|
||||
pub mod literal;
|
||||
pub mod node_count;
|
||||
pub mod parser;
|
||||
pub mod map_in_place;
|
||||
@ -100,11 +95,9 @@ pub mod ast;
|
||||
pub mod attr;
|
||||
pub mod expand;
|
||||
pub mod source_map;
|
||||
#[macro_use] pub mod config;
|
||||
pub mod entry;
|
||||
pub mod feature_gate;
|
||||
pub mod mut_visit;
|
||||
pub mod parse;
|
||||
pub mod ptr;
|
||||
pub mod show_span;
|
||||
pub use syntax_pos::edition;
|
||||
|
@ -22,9 +22,6 @@ use rustc_data_structures::sync::Lrc;
|
||||
use std::ops::DerefMut;
|
||||
use std::{panic, process, ptr};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub trait ExpectOne<A: Array> {
|
||||
fn expect_one(self, err: &'static str) -> A::Item;
|
||||
}
|
||||
|
@ -321,7 +321,7 @@ fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
|
||||
token_kind_to_string_ext(&token.kind, convert_dollar_crate)
|
||||
}
|
||||
|
||||
crate fn nonterminal_to_string(nt: &Nonterminal) -> String {
|
||||
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
|
||||
match *nt {
|
||||
token::NtExpr(ref e) => expr_to_string(e),
|
||||
token::NtMeta(ref e) => attr_item_to_string(e),
|
||||
@ -939,8 +939,11 @@ impl<'a> State<'a> {
|
||||
self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
|
||||
}
|
||||
|
||||
crate fn print_mod(&mut self, _mod: &ast::Mod,
|
||||
attrs: &[ast::Attribute]) {
|
||||
pub fn print_mod(
|
||||
&mut self,
|
||||
_mod: &ast::Mod,
|
||||
attrs: &[ast::Attribute],
|
||||
) {
|
||||
self.print_inner_attributes(attrs);
|
||||
for item in &_mod.items {
|
||||
self.print_item(item);
|
||||
@ -955,7 +958,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) {
|
||||
pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) {
|
||||
if let Some(lt) = *lifetime {
|
||||
self.print_lifetime(lt);
|
||||
self.nbsp();
|
||||
@ -970,7 +973,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn print_type(&mut self, ty: &ast::Ty) {
|
||||
pub fn print_type(&mut self, ty: &ast::Ty) {
|
||||
self.maybe_print_comment(ty.span.lo());
|
||||
self.ibox(0);
|
||||
match ty.kind {
|
||||
@ -1995,7 +1998,7 @@ impl<'a> State<'a> {
|
||||
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
|
||||
}
|
||||
|
||||
crate fn print_expr(&mut self, expr: &ast::Expr) {
|
||||
pub fn print_expr(&mut self, expr: &ast::Expr) {
|
||||
self.print_expr_outer_attr_style(expr, true)
|
||||
}
|
||||
|
||||
@ -2332,7 +2335,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn print_usize(&mut self, i: usize) {
|
||||
pub fn print_usize(&mut self, i: usize) {
|
||||
self.s.word(i.to_string())
|
||||
}
|
||||
|
||||
@ -2601,7 +2604,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound]) {
|
||||
pub fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound]) {
|
||||
if !bounds.is_empty() {
|
||||
self.s.word(prefix);
|
||||
let mut first = true;
|
||||
@ -2760,7 +2763,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn print_mutability(&mut self, mutbl: ast::Mutability) {
|
||||
pub fn print_mutability(&mut self, mutbl: ast::Mutability) {
|
||||
match mutbl {
|
||||
ast::Mutability::Mutable => self.word_nbsp("mut"),
|
||||
ast::Mutability::Immutable => {},
|
||||
|
@@ -1,7 +1,7 @@
//! Contains `ParseSess` which holds state living beyond what one `Parser` might.
//! It also serves as an input to the parser itself.

use crate::ast::{CrateConfig, NodeId};
use crate::ast::{CrateConfig, NodeId, Attribute};
use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
use crate::source_map::{SourceMap, FilePathMapping};
use crate::feature_gate::UnstableFeatures;

@@ -71,14 +71,14 @@ impl GatedSpans {
/// Info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: Handler,
crate unstable_features: UnstableFeatures,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
pub edition: Edition,
pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
/// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
pub raw_identifier_spans: Lock<Vec<Span>>,
/// Used to determine and report recursive module inclusions.
pub(super) included_mod_stack: Lock<Vec<PathBuf>>,
pub included_mod_stack: Lock<Vec<PathBuf>>,
source_map: Lrc<SourceMap>,
pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
/// Contains the spans of block expressions that could have been incomplete based on the

@@ -89,10 +89,22 @@ pub struct ParseSess {
pub gated_spans: GatedSpans,
/// The parser has reached `Eof` due to an unclosed brace. Used to silence unnecessary errors.
pub reached_eof: Lock<bool>,
/// Process the potential `cfg` attributes on a module.
/// Also determine if the module should be included in this configuration.
///
/// HACK(Centril): This is used to break a cyclic dependency between
/// the parser and cfg-stripping as defined in `syntax_expand::config`.
/// The dependency edge from the parser comes from `parse_item_mod`.
/// A principled solution to this hack would be to implement [#64197].
///
/// [#64197]: https://github.com/rust-lang/rust/issues/64197
pub process_cfg_mod: ProcessCfgMod,
}

pub type ProcessCfgMod = fn(&ParseSess, bool, &[Attribute]) -> (bool, Vec<Attribute>);

impl ParseSess {
pub fn new(file_path_mapping: FilePathMapping) -> Self {
pub fn new(file_path_mapping: FilePathMapping, process_cfg_mod: ProcessCfgMod) -> Self {
let cm = Lrc::new(SourceMap::new(file_path_mapping));
let handler = Handler::with_tty_emitter(
ColorConfig::Auto,

@@ -100,12 +112,17 @@ impl ParseSess {
None,
Some(cm.clone()),
);
ParseSess::with_span_handler(handler, cm)
ParseSess::with_span_handler(handler, cm, process_cfg_mod)
}

pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
pub fn with_span_handler(
handler: Handler,
source_map: Lrc<SourceMap>,
process_cfg_mod: ProcessCfgMod,
) -> Self {
Self {
span_diagnostic: handler,
process_cfg_mod,
unstable_features: UnstableFeatures::from_environment(),
config: FxHashSet::default(),
edition: ExpnId::root().expn_data().edition,

@@ -121,10 +138,10 @@ impl ParseSess {
}
}

pub fn with_silent_emitter() -> Self {
pub fn with_silent_emitter(process_cfg_mod: ProcessCfgMod) -> Self {
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let handler = Handler::with_emitter(false, None, Box::new(SilentEmitter));
ParseSess::with_span_handler(handler, cm)
ParseSess::with_span_handler(handler, cm, process_cfg_mod)
}

#[inline]

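Every `ParseSess` constructor now threads this callback through, so callers pass `syntax_expand::config::process_configure_mod` (or another `ProcessCfgMod`) explicitly. A minimal sketch of the new call shape, mirroring the test updates later in this commit:

```rust
use syntax::sess::ParseSess;
use syntax::source_map::FilePathMapping;
use syntax_expand::config::process_configure_mod;

// The second argument is the `ProcessCfgMod` hook invoked when the parser
// reaches an out-of-line module.
fn mk_parse_sess() -> ParseSess {
    ParseSess::new(FilePathMapping::empty(), process_configure_mod)
}
```
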
@ -107,14 +107,14 @@ impl fmt::Display for Lit {
|
||||
|
||||
impl LitKind {
|
||||
/// An English article for the literal token kind.
|
||||
crate fn article(self) -> &'static str {
|
||||
pub fn article(self) -> &'static str {
|
||||
match self {
|
||||
Integer | Err => "an",
|
||||
_ => "a",
|
||||
}
|
||||
}
|
||||
|
||||
crate fn descr(self) -> &'static str {
|
||||
pub fn descr(self) -> &'static str {
|
||||
match self {
|
||||
Bool => panic!("literal token contains `Lit::Bool`"),
|
||||
Byte => "byte",
|
||||
@ -141,12 +141,12 @@ impl Lit {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
|
||||
pub fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool {
|
||||
let ident_token = Token::new(Ident(name, is_raw), span);
|
||||
token_can_begin_expr(&ident_token)
|
||||
}
|
||||
|
||||
pub(crate) fn token_can_begin_expr(ident_token: &Token) -> bool {
|
||||
pub fn token_can_begin_expr(ident_token: &Token) -> bool {
|
||||
!ident_token.is_reserved_ident() ||
|
||||
ident_token.is_path_segment_keyword() ||
|
||||
match ident_token.kind {
|
||||
@ -276,7 +276,7 @@ impl TokenKind {
|
||||
|
||||
/// Returns tokens that are likely to be typed accidentally instead of the current token.
|
||||
/// Enables better error recovery when the wrong token is found.
|
||||
crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
|
||||
pub fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
|
||||
match *self {
|
||||
Comma => Some(vec![Dot, Lt, Semi]),
|
||||
Semi => Some(vec![Colon, Comma]),
|
||||
@ -291,7 +291,7 @@ impl Token {
|
||||
}
|
||||
|
||||
/// Some token that will be thrown away later.
|
||||
crate fn dummy() -> Self {
|
||||
pub fn dummy() -> Self {
|
||||
Token::new(TokenKind::Whitespace, DUMMY_SP)
|
||||
}
|
||||
|
||||
@ -305,7 +305,7 @@ impl Token {
|
||||
mem::replace(self, Token::dummy())
|
||||
}
|
||||
|
||||
crate fn is_op(&self) -> bool {
|
||||
pub fn is_op(&self) -> bool {
|
||||
match self.kind {
|
||||
OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
|
||||
Ident(..) | Lifetime(..) | Interpolated(..) |
|
||||
@ -314,7 +314,7 @@ impl Token {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn is_like_plus(&self) -> bool {
|
||||
pub fn is_like_plus(&self) -> bool {
|
||||
match self.kind {
|
||||
BinOp(Plus) | BinOpEq(Plus) => true,
|
||||
_ => false,
|
||||
@ -377,7 +377,7 @@ impl Token {
|
||||
}
|
||||
|
||||
/// Returns `true` if the token can appear at the start of a const param.
|
||||
crate fn can_begin_const_arg(&self) -> bool {
|
||||
pub fn can_begin_const_arg(&self) -> bool {
|
||||
match self.kind {
|
||||
OpenDelim(Brace) => true,
|
||||
Interpolated(ref nt) => match **nt {
|
||||
@ -389,7 +389,7 @@ impl Token {
|
||||
}
|
||||
|
||||
/// Returns `true` if the token can appear at the start of a generic bound.
|
||||
crate fn can_begin_bound(&self) -> bool {
|
||||
pub fn can_begin_bound(&self) -> bool {
|
||||
self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
|
||||
self == &Question || self == &OpenDelim(Paren)
|
||||
}
|
||||
@ -446,13 +446,13 @@ impl Token {
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a lifetime.
|
||||
crate fn is_lifetime(&self) -> bool {
|
||||
pub fn is_lifetime(&self) -> bool {
|
||||
self.lifetime().is_some()
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is an identifier whose name is the given
/// string slice.
|
||||
crate fn is_ident_named(&self, name: Symbol) -> bool {
|
||||
pub fn is_ident_named(&self, name: Symbol) -> bool {
|
||||
self.ident().map_or(false, |(ident, _)| ident.name == name)
|
||||
}
|
||||
|
||||
@ -469,7 +469,7 @@ impl Token {
|
||||
/// Would `maybe_whole_expr` in `parser.rs` return `Ok(..)`?
|
||||
/// That is, is this a pre-parsed expression dropped into the token stream
|
||||
/// (which happens while parsing the result of macro expansion)?
|
||||
crate fn is_whole_expr(&self) -> bool {
|
||||
pub fn is_whole_expr(&self) -> bool {
|
||||
if let Interpolated(ref nt) = self.kind {
|
||||
if let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtIdent(..) | NtBlock(_) = **nt {
|
||||
return true;
|
||||
@ -480,16 +480,16 @@ impl Token {
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is either the `mut` or `const` keyword.
|
||||
crate fn is_mutability(&self) -> bool {
|
||||
pub fn is_mutability(&self) -> bool {
|
||||
self.is_keyword(kw::Mut) ||
|
||||
self.is_keyword(kw::Const)
|
||||
}
|
||||
|
||||
crate fn is_qpath_start(&self) -> bool {
|
||||
pub fn is_qpath_start(&self) -> bool {
|
||||
self == &Lt || self == &BinOp(Shl)
|
||||
}
|
||||
|
||||
crate fn is_path_start(&self) -> bool {
|
||||
pub fn is_path_start(&self) -> bool {
|
||||
self == &ModSep || self.is_qpath_start() || self.is_path() ||
|
||||
self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
|
||||
}
|
||||
@ -499,23 +499,23 @@ impl Token {
|
||||
self.is_non_raw_ident_where(|id| id.name == kw)
|
||||
}
|
||||
|
||||
crate fn is_path_segment_keyword(&self) -> bool {
|
||||
pub fn is_path_segment_keyword(&self) -> bool {
|
||||
self.is_non_raw_ident_where(ast::Ident::is_path_segment_keyword)
|
||||
}
|
||||
|
||||
// Returns true for reserved identifiers used internally for elided lifetimes,
|
||||
// unnamed method parameters, crate root module, error recovery etc.
|
||||
crate fn is_special_ident(&self) -> bool {
|
||||
pub fn is_special_ident(&self) -> bool {
|
||||
self.is_non_raw_ident_where(ast::Ident::is_special)
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a keyword used in the language.
|
||||
crate fn is_used_keyword(&self) -> bool {
|
||||
pub fn is_used_keyword(&self) -> bool {
|
||||
self.is_non_raw_ident_where(ast::Ident::is_used_keyword)
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is a keyword reserved for possible future use.
|
||||
crate fn is_unused_keyword(&self) -> bool {
|
||||
pub fn is_unused_keyword(&self) -> bool {
|
||||
self.is_non_raw_ident_where(ast::Ident::is_unused_keyword)
|
||||
}
|
||||
|
||||
@ -525,7 +525,7 @@ impl Token {
|
||||
}
|
||||
|
||||
/// Returns `true` if the token is the identifier `true` or `false`.
|
||||
crate fn is_bool_lit(&self) -> bool {
|
||||
pub fn is_bool_lit(&self) -> bool {
|
||||
self.is_non_raw_ident_where(|id| id.name.is_bool_lit())
|
||||
}
|
||||
|
||||
@ -537,7 +537,7 @@ impl Token {
|
||||
}
|
||||
}
|
||||
|
||||
crate fn glue(&self, joint: &Token) -> Option<Token> {
|
||||
pub fn glue(&self, joint: &Token) -> Option<Token> {
|
||||
let kind = match self.kind {
|
||||
Eq => match joint.kind {
|
||||
Eq => EqEq,
|
||||
|
@ -23,9 +23,6 @@ use smallvec::{SmallVec, smallvec};
|
||||
|
||||
use std::{iter, mem};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// When the main rust parser encounters a syntax-extension invocation, it
|
||||
/// parses the arguments to the invocation as a token-tree. This is a very
|
||||
/// loose structure, such that all sorts of different AST-fragments can
|
||||
@ -218,7 +215,7 @@ impl TokenStream {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
|
||||
pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
|
||||
match streams.len() {
|
||||
0 => TokenStream::default(),
|
||||
1 => streams.pop().unwrap(),
|
||||
|
@@ -32,14 +32,14 @@ pub struct Comment {
pub pos: BytePos,
}

crate fn is_line_doc_comment(s: &str) -> bool {
pub fn is_line_doc_comment(s: &str) -> bool {
let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
s.starts_with("//!");
debug!("is {:?} a doc comment? {}", s, res);
res
}

crate fn is_block_doc_comment(s: &str) -> bool {
pub fn is_block_doc_comment(s: &str) -> bool {
// Prevent `/**/` from being parsed as a doc comment
let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
s.starts_with("/*!")) && s.len() >= 5;

@@ -47,7 +47,8 @@ crate fn is_block_doc_comment(s: &str) -> bool {
res
}

crate fn is_doc_comment(s: &str) -> bool {
// FIXME(#64197): Try to privatize this again.
pub fn is_doc_comment(s: &str) -> bool {
(s.starts_with("///") && is_line_doc_comment(s)) || s.starts_with("//!") ||
(s.starts_with("/**") && is_block_doc_comment(s)) || s.starts_with("/*!")
}

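These predicates are made `pub` so code in the new `rustc_parse` crate (e.g. the lexer) can keep calling them. A few illustrative checks that follow directly from the definitions above (not part of the diff):

```rust
use syntax::util::comments::{is_block_doc_comment, is_line_doc_comment};

fn main() {
    assert!(is_line_doc_comment("/// outer line doc"));
    assert!(is_line_doc_comment("//! inner line doc"));
    assert!(!is_line_doc_comment("//// four slashes is a plain comment"));

    assert!(is_block_doc_comment("/** outer block doc */"));
    assert!(is_block_doc_comment("/*! inner block doc */"));
    assert!(!is_block_doc_comment("/**/")); // too short: an empty ordinary block comment
}
```
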
@ -14,7 +14,7 @@ use rustc_lexer::unescape::{unescape_raw_str, unescape_raw_byte_str};
|
||||
|
||||
use std::ascii;
|
||||
|
||||
crate enum LitError {
|
||||
pub enum LitError {
|
||||
NotLiteral,
|
||||
LexerError,
|
||||
InvalidSuffix,
|
||||
@ -185,12 +185,12 @@ impl LitKind {
|
||||
|
||||
impl Lit {
|
||||
/// Converts literal token into an AST literal.
|
||||
crate fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
|
||||
pub fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
|
||||
Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span })
|
||||
}
|
||||
|
||||
/// Converts arbitrary token into an AST literal.
|
||||
crate fn from_token(token: &Token) -> Result<Lit, LitError> {
|
||||
pub fn from_token(token: &Token) -> Result<Lit, LitError> {
|
||||
let lit = match token.kind {
|
||||
token::Ident(name, false) if name.is_bool_lit() =>
|
||||
token::Lit::new(token::Bool, name, None),
|
||||
@ -217,8 +217,8 @@ impl Lit {
|
||||
Lit { token: kind.to_lit_token(), kind, span }
|
||||
}
|
||||
|
||||
/// Losslessly convert an AST literal into a token tree.
|
||||
crate fn token_tree(&self) -> TokenTree {
|
||||
/// Losslessly convert an AST literal into a token stream.
|
||||
pub fn token_tree(&self) -> TokenTree {
|
||||
let token = match self.token.kind {
|
||||
token::Bool => token::Ident(self.token.symbol, false),
|
||||
_ => token::Literal(self.token),
|
||||
|
@@ -69,7 +69,7 @@ pub enum Fixity {

impl AssocOp {
/// Creates a new `AssocOp` from a token.
crate fn from_token(t: &Token) -> Option<AssocOp> {
pub fn from_token(t: &Token) -> Option<AssocOp> {
use AssocOp::*;
match t.kind {
token::BinOpEq(k) => Some(AssignOp(k)),

@@ -358,7 +358,7 @@ impl ExprPrecedence {
}

/// In `let p = e`, operators with precedence `<=` this one require parentheses in `e`.
crate fn prec_let_scrutinee_needs_par() -> usize {
pub fn prec_let_scrutinee_needs_par() -> usize {
AssocOp::LAnd.precedence()
}
|
||||
|
||||
|
@ -11,15 +11,12 @@ path = "lib.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
bitflags = "1.0"
|
||||
rustc_serialize = { path = "../libserialize", package = "serialize" }
|
||||
log = "0.4"
|
||||
scoped-tls = "1.0"
|
||||
lazy_static = "1.0.0"
|
||||
syntax_pos = { path = "../libsyntax_pos" }
|
||||
errors = { path = "../librustc_errors", package = "rustc_errors" }
|
||||
rustc_data_structures = { path = "../librustc_data_structures" }
|
||||
rustc_index = { path = "../librustc_index" }
|
||||
rustc_lexer = { path = "../librustc_lexer" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
|
||||
syntax = { path = "../libsyntax" }
|
||||
|
@ -1,15 +1,15 @@
|
||||
use crate::expand::{self, AstFragment, Invocation};
|
||||
|
||||
use rustc_parse::{self, parser, DirectoryOwnership, MACRO_ARGUMENTS};
|
||||
use syntax::ast::{self, NodeId, Attribute, Name, PatKind};
|
||||
use syntax::attr::{self, HasAttrs, Stability, Deprecation};
|
||||
use syntax::source_map::SourceMap;
|
||||
use syntax::edition::Edition;
|
||||
use syntax::mut_visit::{self, MutVisitor};
|
||||
use syntax::parse::{self, parser, DirectoryOwnership};
|
||||
use syntax::ptr::P;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::symbol::{kw, sym, Ident, Symbol};
|
||||
use syntax::{ThinVec, MACRO_ARGUMENTS};
|
||||
use syntax::ThinVec;
|
||||
use syntax::token;
|
||||
use syntax::tokenstream::{self, TokenStream};
|
||||
use syntax::visit::Visitor;
|
||||
@ -18,9 +18,9 @@ use errors::{DiagnosticBuilder, DiagnosticId};
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
|
||||
use syntax_pos::hygiene::{AstPass, ExpnId, ExpnData, ExpnKind};
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sync::{self, Lrc};
|
||||
|
||||
use std::iter;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
@ -922,7 +922,7 @@ impl<'a> ExtCtxt<'a> {
|
||||
expand::MacroExpander::new(self, true)
|
||||
}
|
||||
pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
|
||||
parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
|
||||
rustc_parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
|
||||
}
|
||||
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
|
||||
pub fn parse_sess(&self) -> &'a ParseSess { self.parse_sess }
|
||||
|
@ -1,20 +1,20 @@
|
||||
use crate::attr::HasAttrs;
|
||||
use crate::feature_gate::{
|
||||
use rustc_parse::validate_attr;
|
||||
use syntax::attr::HasAttrs;
|
||||
use syntax::feature_gate::{
|
||||
feature_err,
|
||||
EXPLAIN_STMT_ATTR_SYNTAX,
|
||||
Features,
|
||||
get_features,
|
||||
GateIssue,
|
||||
};
|
||||
use crate::attr;
|
||||
use crate::ast;
|
||||
use crate::edition::Edition;
|
||||
use crate::mut_visit::*;
|
||||
use crate::parse;
|
||||
use crate::ptr::P;
|
||||
use crate::sess::ParseSess;
|
||||
use crate::symbol::sym;
|
||||
use crate::util::map_in_place::MapInPlace;
|
||||
use syntax::attr;
|
||||
use syntax::ast;
|
||||
use syntax::edition::Edition;
|
||||
use syntax::mut_visit::*;
|
||||
use syntax::ptr::P;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::util::map_in_place::MapInPlace;
|
||||
use syntax_pos::symbol::sym;
|
||||
|
||||
use errors::Applicability;
|
||||
use smallvec::SmallVec;
|
||||
@ -113,7 +113,7 @@ impl<'a> StripUnconfigured<'a> {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let res = parse::parse_in_attr(self.sess, &attr, |p| p.parse_cfg_attr());
|
||||
let res = rustc_parse::parse_in_attr(self.sess, &attr, |p| p.parse_cfg_attr());
|
||||
let (cfg_predicate, expanded_attrs) = match res {
|
||||
Ok(result) => result,
|
||||
Err(mut e) => {
|
||||
@ -135,12 +135,11 @@ impl<'a> StripUnconfigured<'a> {
|
||||
// `cfg_attr` inside of another `cfg_attr`. E.g.
|
||||
// `#[cfg_attr(false, cfg_attr(true, some_attr))]`.
|
||||
expanded_attrs.into_iter()
|
||||
.flat_map(|(item, span)| self.process_cfg_attr(ast::Attribute {
|
||||
kind: ast::AttrKind::Normal(item),
|
||||
id: attr::mk_attr_id(),
|
||||
style: attr.style,
|
||||
.flat_map(|(item, span)| self.process_cfg_attr(attr::mk_attr_from_item(
|
||||
attr.style,
|
||||
item,
|
||||
span,
|
||||
}))
|
||||
)))
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
@ -148,7 +147,7 @@ impl<'a> StripUnconfigured<'a> {
|
||||
}
|
||||
|
||||
/// Determines if a node with the given attributes should be included in this configuration.
|
||||
pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {
|
||||
pub fn in_cfg(&self, attrs: &[ast::Attribute]) -> bool {
|
||||
attrs.iter().all(|attr| {
|
||||
if !is_cfg(attr) {
|
||||
return true;
|
||||
@ -168,7 +167,7 @@ impl<'a> StripUnconfigured<'a> {
|
||||
true
|
||||
};
|
||||
|
||||
let meta_item = match attr.parse_meta(self.sess) {
|
||||
let meta_item = match validate_attr::parse_meta(self.sess, attr) {
|
||||
Ok(meta_item) => meta_item,
|
||||
Err(mut err) => { err.emit(); return true; }
|
||||
};
|
||||
@@ -350,3 +349,17 @@ impl<'a> MutVisitor for StripUnconfigured<'a> {
fn is_cfg(attr: &ast::Attribute) -> bool {
attr.check_name(sym::cfg)
}

/// Process the potential `cfg` attributes on a module.
/// Also determine if the module should be included in this configuration.
pub fn process_configure_mod(
sess: &ParseSess,
cfg_mods: bool,
attrs: &[ast::Attribute],
) -> (bool, Vec<ast::Attribute>) {
// Don't perform gated feature checking.
let mut strip_unconfigured = StripUnconfigured { sess, features: None };
let mut attrs = attrs.to_owned();
strip_unconfigured.process_cfg_attrs(&mut attrs);
(!cfg_mods || strip_unconfigured.in_cfg(&attrs), attrs)
}

@ -3,17 +3,18 @@ use crate::proc_macro::{collect_derives, MarkAttrs};
|
||||
use crate::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind};
|
||||
use crate::mbe::macro_rules::annotate_err_with_kind;
|
||||
use crate::placeholders::{placeholder, PlaceholderExpander};
|
||||
use crate::config::StripUnconfigured;
|
||||
use crate::configure;
|
||||
|
||||
use rustc_parse::DirectoryOwnership;
|
||||
use rustc_parse::parser::Parser;
|
||||
use rustc_parse::validate_attr;
|
||||
use syntax::ast::{self, AttrItem, Block, Ident, LitKind, NodeId, PatKind, Path};
|
||||
use syntax::ast::{MacStmtStyle, StmtKind, ItemKind};
|
||||
use syntax::attr::{self, HasAttrs};
|
||||
use syntax::source_map::respan;
|
||||
use syntax::configure;
|
||||
use syntax::config::StripUnconfigured;
|
||||
use syntax::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
|
||||
use syntax::mut_visit::*;
|
||||
use syntax::parse::DirectoryOwnership;
|
||||
use syntax::parse::parser::Parser;
|
||||
use syntax::print::pprust;
|
||||
use syntax::ptr::P;
|
||||
use syntax::sess::ParseSess;
|
||||
@ -640,7 +641,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
|
||||
self.parse_ast_fragment(tok_result, fragment_kind, &item.path, span)
|
||||
}
|
||||
SyntaxExtensionKind::LegacyAttr(expander) => {
|
||||
match attr.parse_meta(self.cx.parse_sess) {
|
||||
match validate_attr::parse_meta(self.cx.parse_sess, &attr) {
|
||||
Ok(meta) => {
|
||||
let item = expander.expand(self.cx, span, &meta, item);
|
||||
fragment_kind.expect_from_annotatables(item)
|
||||
@ -1031,6 +1032,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
|
||||
let features = self.cx.ecfg.features.unwrap();
|
||||
for attr in attrs.iter() {
|
||||
feature_gate::check_attribute(attr, self.cx.parse_sess, features);
|
||||
validate_attr::check_meta(self.cx.parse_sess, attr);
|
||||
|
||||
// macros are expanded before any lint passes so this warning has to be hardcoded
|
||||
if attr.has_name(sym::derive) {
|
||||
|
@@ -33,6 +33,35 @@ pub use mbe::macro_rules::compile_declarative_macro;
pub mod base;
pub mod build;
pub mod expand;
#[macro_use] pub mod config;
pub mod proc_macro;

crate mod mbe;

// HACK(Centril, #64197): These shouldn't really be here.
// Rather, they should be with their respective modules which are defined in other crates.
// However, since for now constructing a `ParseSess` sorta requires `config` from this crate,
// these tests will need to live here in the interim.

#[cfg(test)]
mod tests;
#[cfg(test)]
mod parse {
#[cfg(test)]
mod tests;
#[cfg(test)]
mod lexer {
#[cfg(test)]
mod tests;
}
}
#[cfg(test)]
mod tokenstream {
#[cfg(test)]
mod tests;
}
#[cfg(test)]
mod mut_visit {
#[cfg(test)]
mod tests;
}
|
||||
|
@ -76,9 +76,9 @@ use TokenTreeOrTokenTreeSlice::*;
|
||||
|
||||
use crate::mbe::{self, TokenTree};
|
||||
|
||||
use rustc_parse::Directory;
|
||||
use rustc_parse::parser::{Parser, PathStyle};
|
||||
use syntax::ast::{Ident, Name};
|
||||
use syntax::parse::Directory;
|
||||
use syntax::parse::parser::{Parser, PathStyle};
|
||||
use syntax::print::pprust;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::symbol::{kw, sym, Symbol};
|
||||
@ -652,7 +652,7 @@ pub(super) fn parse(
|
||||
directory,
|
||||
recurse_into_modules,
|
||||
true,
|
||||
syntax::MACRO_ARGUMENTS,
|
||||
rustc_parse::MACRO_ARGUMENTS,
|
||||
);
|
||||
|
||||
// A queue of possible matcher positions. We initialize it with the matcher position in which
|
||||
|
@ -8,30 +8,30 @@ use crate::mbe::macro_parser::{Error, Failure, Success};
|
||||
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedParseResult};
|
||||
use crate::mbe::transcribe::transcribe;
|
||||
|
||||
use rustc_parse::parser::Parser;
|
||||
use rustc_parse::Directory;
|
||||
use syntax::ast;
|
||||
use syntax::attr::{self, TransparencyError};
|
||||
use syntax::edition::Edition;
|
||||
use syntax::feature_gate::Features;
|
||||
use syntax::parse::parser::Parser;
|
||||
use syntax::parse::Directory;
|
||||
use syntax::print::pprust;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::symbol::{kw, sym, Symbol};
|
||||
use syntax::token::{self, NtTT, Token, TokenKind::*};
|
||||
use syntax::tokenstream::{DelimSpan, TokenStream};
|
||||
|
||||
use errors::{DiagnosticBuilder, FatalError};
|
||||
use log::debug;
|
||||
use syntax_pos::hygiene::Transparency;
|
||||
use syntax_pos::Span;
|
||||
|
||||
use errors::{DiagnosticBuilder, FatalError};
|
||||
use log::debug;
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use std::borrow::Cow;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::{mem, slice};
|
||||
|
||||
use errors::Applicability;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
|
||||
const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
|
||||
`ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
|
||||
|
@ -1,10 +1,9 @@
|
||||
use super::*;
|
||||
|
||||
use crate::ast::{self, Ident};
|
||||
use crate::tests::{string_to_crate, matches_codepattern};
|
||||
use crate::print::pprust;
|
||||
use crate::mut_visit;
|
||||
use crate::with_default_globals;
|
||||
|
||||
use syntax::ast::{self, Ident};
|
||||
use syntax::print::pprust;
|
||||
use syntax::mut_visit::{self, MutVisitor};
|
||||
use syntax::with_default_globals;
|
||||
|
||||
// This version doesn't care about getting comments or doc-strings in.
|
||||
fn fake_print_crate(s: &mut pprust::State<'_>,
|
@ -1,15 +1,18 @@
|
||||
use super::*;
|
||||
use crate::config::process_configure_mod;
|
||||
|
||||
use crate::symbol::Symbol;
|
||||
use crate::source_map::{SourceMap, FilePathMapping};
|
||||
use crate::token;
|
||||
use crate::util::comments::is_doc_comment;
|
||||
use crate::with_default_globals;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_parse::lexer::StringReader;
|
||||
use syntax::token::{self, Token, TokenKind};
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::source_map::{SourceMap, FilePathMapping};
|
||||
use syntax::util::comments::is_doc_comment;
|
||||
use syntax::with_default_globals;
|
||||
use syntax_pos::symbol::Symbol;
|
||||
use syntax_pos::{BytePos, Span};
|
||||
|
||||
use errors::{Handler, emitter::EmitterWriter};
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use syntax_pos::{BytePos, Span};
|
||||
|
||||
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
|
||||
let emitter = EmitterWriter::new(
|
||||
@ -21,7 +24,11 @@ fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
|
||||
None,
|
||||
false,
|
||||
);
|
||||
ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
|
||||
ParseSess::with_span_handler(
|
||||
Handler::with_emitter(true, None, Box::new(emitter)),
|
||||
sm,
|
||||
process_configure_mod,
|
||||
)
|
||||
}
|
||||
|
||||
// Creates a string reader for the given string.
|
@ -1,21 +1,27 @@
|
||||
use super::*;
|
||||
|
||||
use crate::ast::{self, Name, PatKind};
|
||||
use crate::attr::first_attr_value_str_by_name;
|
||||
use crate::sess::ParseSess;
|
||||
use crate::parse::{PResult, new_parser_from_source_str};
|
||||
use crate::token::Token;
|
||||
use crate::print::pprust::item_to_string;
|
||||
use crate::ptr::P;
|
||||
use crate::source_map::FilePathMapping;
|
||||
use crate::symbol::{kw, sym};
|
||||
use crate::config::process_configure_mod;
|
||||
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
|
||||
use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
|
||||
use crate::with_default_globals;
|
||||
use syntax_pos::{Span, BytePos, Pos};
|
||||
|
||||
use rustc_parse::new_parser_from_source_str;
|
||||
use syntax::ast::{self, Name, PatKind};
|
||||
use syntax::attr::first_attr_value_str_by_name;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::token::{self, Token};
|
||||
use syntax::print::pprust::item_to_string;
|
||||
use syntax::ptr::P;
|
||||
use syntax::source_map::FilePathMapping;
|
||||
use syntax::symbol::{kw, sym};
|
||||
use syntax::tokenstream::{DelimSpan, TokenTree, TokenStream};
|
||||
use syntax::visit;
|
||||
use syntax::with_default_globals;
|
||||
use syntax_pos::{Span, BytePos, Pos, FileName};
|
||||
use errors::PResult;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn sess() -> ParseSess {
|
||||
ParseSess::new(FilePathMapping::empty(), process_configure_mod)
|
||||
}
|
||||
|
||||
/// Parses an item.
|
||||
///
|
||||
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
|
||||
@ -32,18 +38,12 @@ fn sp(a: u32, b: u32) -> Span {
|
||||
|
||||
/// Parses a string, return an expression.
|
||||
fn string_to_expr(source_str : String) -> P<ast::Expr> {
|
||||
let ps = ParseSess::new(FilePathMapping::empty());
|
||||
with_error_checking_parse(source_str, &ps, |p| {
|
||||
p.parse_expr()
|
||||
})
|
||||
with_error_checking_parse(source_str, &sess(), |p| p.parse_expr())
|
||||
}
|
||||
|
||||
/// Parses a string, returns an item.
|
||||
fn string_to_item(source_str : String) -> Option<P<ast::Item>> {
|
||||
let ps = ParseSess::new(FilePathMapping::empty());
|
||||
with_error_checking_parse(source_str, &ps, |p| {
|
||||
p.parse_item()
|
||||
})
|
||||
with_error_checking_parse(source_str, &sess(), |p| p.parse_item())
|
||||
}
|
||||
|
||||
#[should_panic]
|
||||
@ -169,20 +169,20 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
|
||||
struct PatIdentVisitor {
|
||||
spans: Vec<Span>
|
||||
}
|
||||
impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
|
||||
impl<'a> visit::Visitor<'a> for PatIdentVisitor {
|
||||
fn visit_pat(&mut self, p: &'a ast::Pat) {
|
||||
match p.kind {
|
||||
PatKind::Ident(_ , ref ident, _) => {
|
||||
self.spans.push(ident.span.clone());
|
||||
}
|
||||
_ => {
|
||||
crate::visit::walk_pat(self, p);
|
||||
visit::walk_pat(self, p);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut v = PatIdentVisitor { spans: Vec::new() };
|
||||
crate::visit::walk_item(&mut v, &item);
|
||||
visit::walk_item(&mut v, &item);
|
||||
return v.spans;
|
||||
}
|
||||
|
||||
@ -233,7 +233,7 @@ let mut fflags: c_int = wb();
|
||||
|
||||
#[test] fn crlf_doc_comments() {
|
||||
with_default_globals(|| {
|
||||
let sess = ParseSess::new(FilePathMapping::empty());
|
||||
let sess = sess();
|
||||
|
||||
let name_1 = FileName::Custom("crlf_source_1".to_string());
|
||||
let source = "/// doc comment\r\nfn foo() {}".to_string();
|
||||
@ -268,7 +268,7 @@ fn ttdelim_span() {
|
||||
}
|
||||
|
||||
with_default_globals(|| {
|
||||
let sess = ParseSess::new(FilePathMapping::empty());
|
||||
let sess = sess();
|
||||
let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
|
||||
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
|
||||
|
||||
@ -292,11 +292,10 @@ fn ttdelim_span() {
|
||||
#[test]
|
||||
fn out_of_line_mod() {
|
||||
with_default_globals(|| {
|
||||
let sess = ParseSess::new(FilePathMapping::empty());
|
||||
let item = parse_item_from_source_str(
|
||||
PathBuf::from("foo").into(),
|
||||
"mod foo { struct S; mod this_does_not_exist; }".to_owned(),
|
||||
&sess,
|
||||
&sess(),
|
||||
).unwrap().unwrap();
|
||||
|
||||
if let ast::ItemKind::Mod(ref m) = item.kind {
|
@ -4,7 +4,6 @@ use crate::proc_macro_server;
|
||||
use syntax::ast::{self, ItemKind, Attribute, Mac};
|
||||
use syntax::attr::{mark_used, mark_known};
|
||||
use syntax::errors::{Applicability, FatalError};
|
||||
use syntax::parse;
|
||||
use syntax::symbol::sym;
|
||||
use syntax::token;
|
||||
use syntax::tokenstream::{self, TokenStream};
|
||||
@ -135,7 +134,11 @@ impl MultiItemModifier for ProcMacroDerive {
|
||||
let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
|
||||
let msg = "proc-macro derive produced unparseable tokens";
|
||||
|
||||
let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
|
||||
let mut parser = rustc_parse::stream_to_parser(
|
||||
ecx.parse_sess,
|
||||
stream,
|
||||
Some("proc-macro derive"),
|
||||
);
|
||||
let mut items = vec![];
|
||||
|
||||
loop {
|
||||
@ -200,7 +203,7 @@ crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>)
|
||||
if attr.get_normal_item().tokens.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
parse::parse_in_attr(cx.parse_sess, attr, |p| p.parse_derive_paths())
|
||||
rustc_parse::parse_in_attr(cx.parse_sess, attr, |p| p.parse_derive_paths())
|
||||
};
|
||||
|
||||
match parse_derive_paths(attr) {
|
||||
|
@ -1,17 +1,17 @@
|
||||
use crate::base::ExtCtxt;
|
||||
|
||||
use rustc_parse::{parse_stream_from_source_str, nt_to_tokenstream};
|
||||
use syntax::ast;
|
||||
use syntax::parse;
|
||||
use syntax::util::comments;
|
||||
use syntax::print::pprust;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::token;
|
||||
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
|
||||
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
|
||||
use syntax_pos::symbol::{kw, sym, Symbol};
|
||||
|
||||
use errors::Diagnostic;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
|
||||
use syntax_pos::symbol::{kw, sym, Symbol};
|
||||
|
||||
use pm::{Delimiter, Level, LineColumn, Spacing};
|
||||
use pm::bridge::{server, TokenTree};
|
||||
@ -178,7 +178,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
|
||||
}
|
||||
|
||||
Interpolated(nt) => {
|
||||
let stream = parse::nt_to_tokenstream(&nt, sess, span);
|
||||
let stream = nt_to_tokenstream(&nt, sess, span);
|
||||
TokenTree::Group(Group {
|
||||
delimiter: Delimiter::None,
|
||||
stream,
|
||||
@ -402,7 +402,7 @@ impl server::TokenStream for Rustc<'_> {
|
||||
stream.is_empty()
|
||||
}
|
||||
fn from_str(&mut self, src: &str) -> Self::TokenStream {
|
||||
parse::parse_stream_from_source_str(
|
||||
parse_stream_from_source_str(
|
||||
FileName::proc_macro_source_code(src),
|
||||
src.to_string(),
|
||||
self.sess,
|
||||
|
@ -1,16 +1,15 @@
|
||||
use crate::ast;
|
||||
use crate::parse::source_file_to_stream;
|
||||
use crate::parse::new_parser_from_source_str;
|
||||
use crate::parse::parser::Parser;
|
||||
use crate::sess::ParseSess;
|
||||
use crate::source_map::{SourceMap, FilePathMapping};
|
||||
use crate::tokenstream::TokenStream;
|
||||
use crate::with_default_globals;
|
||||
use crate::config::process_configure_mod;
|
||||
use rustc_parse::{source_file_to_stream, new_parser_from_source_str, parser::Parser};
|
||||
use syntax::ast;
|
||||
use syntax::tokenstream::TokenStream;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax::source_map::{SourceMap, FilePathMapping};
|
||||
use syntax::with_default_globals;
|
||||
use syntax_pos::{BytePos, Span, MultiSpan};
|
||||
|
||||
use errors::emitter::EmitterWriter;
|
||||
use errors::{PResult, Handler};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use syntax_pos::{BytePos, Span, MultiSpan};
|
||||
|
||||
use std::io;
|
||||
use std::io::prelude::*;
|
||||
@ -35,7 +34,7 @@ crate fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F)
|
||||
|
||||
/// Maps a string to tts, using a made-up filename.
|
||||
crate fn string_to_stream(source_str: String) -> TokenStream {
|
||||
let ps = ParseSess::new(FilePathMapping::empty());
|
||||
let ps = ParseSess::new(FilePathMapping::empty(), process_configure_mod);
|
||||
source_file_to_stream(
|
||||
&ps,
|
||||
ps.source_map().new_source_file(PathBuf::from("bogofile").into(),
|
||||
@ -45,7 +44,7 @@ crate fn string_to_stream(source_str: String) -> TokenStream {
|
||||
|
||||
/// Parses a string, returns a crate.
|
||||
crate fn string_to_crate(source_str : String) -> ast::Crate {
|
||||
let ps = ParseSess::new(FilePathMapping::empty());
|
||||
let ps = ParseSess::new(FilePathMapping::empty(), process_configure_mod);
|
||||
with_error_checking_parse(source_str, &ps, |p| {
|
||||
p.parse_crate_mod()
|
||||
})
|
@ -1,9 +1,11 @@
|
||||
use super::*;
|
||||
|
||||
use crate::ast::Name;
|
||||
use crate::with_default_globals;
|
||||
use crate::tests::string_to_stream;
|
||||
|
||||
use syntax::ast::Name;
|
||||
use syntax::token;
|
||||
use syntax::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree};
|
||||
use syntax::with_default_globals;
|
||||
use syntax_pos::{Span, BytePos};
|
||||
use smallvec::smallvec;
|
||||
|
||||
fn string_to_ts(string: &str) -> TokenStream {
|
||||
string_to_stream(string.to_owned())
|
@ -14,6 +14,7 @@ errors = { path = "../librustc_errors", package = "rustc_errors" }
|
||||
fmt_macros = { path = "../libfmt_macros" }
|
||||
log = "0.4"
|
||||
rustc_data_structures = { path = "../librustc_data_structures" }
|
||||
rustc_parse = { path = "../librustc_parse" }
|
||||
rustc_target = { path = "../librustc_target" }
|
||||
smallvec = { version = "1.0", features = ["union", "may_dangle"] }
|
||||
syntax = { path = "../libsyntax" }
|
||||
|
@ -1,13 +1,13 @@
|
||||
use errors::{Applicability, DiagnosticBuilder};
|
||||
|
||||
use rustc_parse::parser::Parser;
|
||||
use syntax::ast::{self, *};
|
||||
use syntax_expand::base::*;
|
||||
use syntax::token::{self, TokenKind};
|
||||
use syntax::parse::parser::Parser;
|
||||
use syntax::print::pprust;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{sym, Symbol};
|
||||
use syntax::tokenstream::{TokenStream, TokenTree};
|
||||
use syntax_expand::base::*;
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
|
||||
pub fn expand_assert<'cx>(
|
||||
|
@ -2,7 +2,6 @@
|
||||
|
||||
use syntax::ast::{self, AttrItem, AttrStyle};
|
||||
use syntax::attr::mk_attr;
|
||||
use syntax::parse;
|
||||
use syntax::token;
|
||||
use syntax::sess::ParseSess;
|
||||
use syntax_expand::panictry;
|
||||
@ -10,7 +9,7 @@ use syntax_pos::FileName;
|
||||
|
||||
pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
|
||||
for raw_attr in attrs {
|
||||
let mut parser = parse::new_parser_from_source_str(
|
||||
let mut parser = rustc_parse::new_parser_from_source_str(
|
||||
parse_sess,
|
||||
FileName::cli_crate_attr_source_code(&raw_attr),
|
||||
raw_attr.clone(),
|
||||
|
@ -1,13 +1,13 @@
use syntax_expand::panictry;
use syntax_expand::base::{self, *};
use rustc_parse::{self, DirectoryOwnership, new_sub_parser_from_file, parser::Parser};
use syntax::ast;
use syntax::parse::{self, DirectoryOwnership};
use syntax::print::pprust;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax::token;
use syntax::tokenstream::TokenStream;
use syntax::early_buffered_lints::BufferedEarlyLintId;
use syntax_expand::panictry;
use syntax_expand::base::{self, *};

use smallvec::SmallVec;
use syntax_pos::{self, Pos, Span};
@ -85,10 +85,10 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
        },
    };
    let directory_ownership = DirectoryOwnership::Owned { relative: None };
    let p = parse::new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp);
    let p = new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp);

struct ExpandResult<'a> {
    p: parse::parser::Parser<'a>,
    p: Parser<'a>,
}
impl<'a> base::MacResult for ExpandResult<'a> {
    fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
@ -1,11 +1,12 @@
use rustc_parse::validate_attr;
use syntax_pos::Symbol;
use syntax::ast::MetaItem;
use syntax::attr::{check_builtin_attribute, AttributeTemplate};
use syntax::attr::AttributeTemplate;
use syntax_expand::base::ExtCtxt;

pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
    // All the built-in macro attributes are "words" at the moment.
    let template = AttributeTemplate::only_word();
    let attr = ecx.attribute(meta_item.clone());
    check_builtin_attribute(ecx.parse_sess, &attr, name, template);
    validate_attr::check_builtin_attribute(ecx.parse_sess, &attr, name, template);
}
@ -6,21 +6,23 @@
#![feature(rustc_private)]

extern crate syntax;
extern crate syntax_expand;
extern crate rustc_parse;
extern crate rustc_errors;

use rustc_errors::PResult;
use rustc_parse::parser::attr::*;
use rustc_parse::new_parser_from_source_str;
use rustc_parse::parser::Parser;
use syntax::ast::*;
use syntax::attr::*;
use syntax::ast;
use syntax::sess::ParseSess;
use syntax::source_map::{FilePathMapping, FileName};
use syntax::parse;
use syntax::parse::new_parser_from_source_str;
use syntax::parse::parser::Parser;
use syntax::token;
use syntax::ptr::P;
use syntax::parse::parser::attr::*;
use syntax::print::pprust;
use syntax::token;
use syntax_expand::config::process_configure_mod;
use std::fmt;

// Copied out of syntax::util::parser_testing
@ -72,8 +74,12 @@ fn str_compare<T, F: Fn(&T) -> String>(e: &str, expected: &[T], actual: &[T], f:
    }
}

fn sess() -> ParseSess {
    ParseSess::new(FilePathMapping::empty(), process_configure_mod)
}

fn check_expr_attrs(es: &str, expected: &[&str]) {
    let ps = ParseSess::new(FilePathMapping::empty());
    let ps = sess();
    let e = expr(es, &ps).expect("parse error");
    let actual = &e.attrs;
    str_compare(es,
@ -83,7 +89,7 @@ fn check_expr_attrs(es: &str, expected: &[&str]) {
}

fn check_stmt_attrs(es: &str, expected: &[&str]) {
    let ps = ParseSess::new(FilePathMapping::empty());
    let ps = sess();
    let e = stmt(es, &ps).expect("parse error");
    let actual = e.kind.attrs();
    str_compare(es,
@ -93,7 +99,7 @@ fn check_stmt_attrs(es: &str, expected: &[&str]) {
}

fn reject_expr_parse(es: &str) {
    let ps = ParseSess::new(FilePathMapping::empty());
    let ps = sess();
    match expr(es, &ps) {
        Ok(_) => panic!("parser did not reject `{}`", es),
        Err(mut e) => e.cancel(),
@ -101,7 +107,7 @@ fn reject_expr_parse(es: &str) {
}

fn reject_stmt_parse(es: &str) {
    let ps = ParseSess::new(FilePathMapping::empty());
    let ps = sess();
    match stmt(es, &ps) {
        Ok(_) => panic!("parser did not reject `{}`", es),
        Err(mut e) => e.cancel(),
@ -5,11 +5,14 @@
#![feature(rustc_private)]

extern crate syntax;
extern crate syntax_expand;
extern crate rustc_parse;

use rustc_parse::new_parser_from_file;
use std::path::Path;
use syntax::sess::ParseSess;
use syntax::source_map::FilePathMapping;
use syntax::parse;
use syntax_expand::config::process_configure_mod;

#[path = "mod_dir_simple/test.rs"]
mod gravy;
@ -21,10 +24,10 @@ pub fn main() {
}

fn parse() {
    let parse_session = ParseSess::new(FilePathMapping::empty());
    let parse_session = ParseSess::new(FilePathMapping::empty(), process_configure_mod);

    let path = Path::new(file!());
    let path = path.canonicalize().unwrap();
    let mut parser = parse::new_parser_from_file(&parse_session, &path);
    let mut parser = new_parser_from_file(&parse_session, &path);
    let _ = parser.parse_crate_mod();
}
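Likewise, a small sketch under the same assumptions of the file-based entry point exercised by the test above; `parse_crate_from_file` is a hypothetical name, not part of the commit.

    use rustc_parse::new_parser_from_file;
    use std::path::Path;
    use syntax::sess::ParseSess;

    // Hypothetical helper mirroring `fn parse()` above; the parse result is
    // discarded just as the test does.
    fn parse_crate_from_file(sess: &ParseSess, path: &Path) {
        let mut parser = new_parser_from_file(sess, path);
        let _ = parser.parse_crate_mod();
    }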
@ -21,21 +21,24 @@

extern crate rustc_data_structures;
extern crate syntax;
extern crate syntax_expand;
extern crate rustc_parse;

use rustc_data_structures::thin_vec::ThinVec;
use rustc_parse::new_parser_from_source_str;
use syntax::ast::*;
use syntax::sess::ParseSess;
use syntax::source_map::{Spanned, DUMMY_SP, FileName};
use syntax::source_map::FilePathMapping;
use syntax::mut_visit::{self, MutVisitor, visit_clobber};
use syntax::parse;
use syntax::print::pprust;
use syntax::ptr::P;
use syntax_expand::config::process_configure_mod;

fn parse_expr(ps: &ParseSess, src: &str) -> Option<P<Expr>> {
    let src_as_string = src.to_string();

    let mut p = parse::new_parser_from_source_str(
    let mut p = new_parser_from_source_str(
        ps,
        FileName::Custom(src_as_string.clone()),
        src_as_string,
@ -202,7 +205,7 @@ fn main() {
}

fn run() {
    let ps = ParseSess::new(FilePathMapping::empty());
    let ps = ParseSess::new(FilePathMapping::empty(), process_configure_mod);

    iter_exprs(2, &mut |mut e| {
        // If the pretty printer is correct, then `parse(print(e))` should be identical to `e`,
@ -1,3 +1,15 @@
error: malformed `proc_macro_derive` attribute input
  --> $DIR/attribute.rs:9:1
   |
LL | #[proc_macro_derive]
   | ^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[proc_macro_derive(TraitName, /*opt*/ attributes(name1, name2, ...))]`

error: malformed `proc_macro_derive` attribute input
  --> $DIR/attribute.rs:12:1
   |
LL | #[proc_macro_derive = ""]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[proc_macro_derive(TraitName, /*opt*/ attributes(name1, name2, ...))]`

error: attribute must have either one or two arguments
  --> $DIR/attribute.rs:15:1
   |
@ -88,17 +100,5 @@ error: `self` cannot be a name of derive helper attribute
LL | #[proc_macro_derive(d17, attributes(self))]
   | ^^^^

error: malformed `proc_macro_derive` attribute input
  --> $DIR/attribute.rs:9:1
   |
LL | #[proc_macro_derive]
   | ^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[proc_macro_derive(TraitName, /*opt*/ attributes(name1, name2, ...))]`

error: malformed `proc_macro_derive` attribute input
  --> $DIR/attribute.rs:12:1
   |
LL | #[proc_macro_derive = ""]
   | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[proc_macro_derive(TraitName, /*opt*/ attributes(name1, name2, ...))]`

error: aborting due to 17 previous errors