9780: Support exclusive_range_pattern r=matklad a=lf-

Fix #9779

Co-authored-by: Jade <software@lfcode.ca>
bors[bot] 2021-08-04 18:12:37 +00:00 committed by GitHub
commit 950efff5c6
12 changed files with 105 additions and 41 deletions

View File

@@ -51,7 +51,7 @@
//! The `GetDeclaredType` takes `Syntax` as input, and returns `Symbol` as
//! output. First, it retrieves a `Symbol` for parent `Syntax`:
//!
//! * https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1423
//! * <https://sourceroslyn.io/#Microsoft.CodeAnalysis.CSharp/Compilation/SyntaxTreeSemanticModel.cs,1423>
//!
//! Then, it iterates parent symbol's children, looking for one which has the
//! same text span as the original node:

View File

@@ -39,8 +39,7 @@
//!
//! Splitting is implemented in the [`Constructor::split`] function. We don't do splitting for
//! or-patterns; instead we just try the alternatives one-by-one. For details on splitting
//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`]; for slices, see
//! [`SplitVarLenSlice`].
//! wildcards, see [`SplitWildcard`]; for integer ranges, see [`SplitIntRange`].
use std::{
cmp::{max, min},
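As a rough illustration of what splitting does for integer ranges, here is a standalone sketch, not the actual `SplitIntRange` code: it assumes inclusive `(lo, hi)` bounds and ignores or-patterns and overflow corner cases. The idea is to cut a wildcard-like range at the boundaries of the ranges seen in the match, so exhaustiveness can be reasoned about per sub-range.

    fn split_range(full: (u32, u32), seen: &[(u32, u32)]) -> Vec<(u32, u32)> {
        // Collect every boundary point from the seen ranges that falls inside `full`.
        let mut cuts: Vec<u32> = seen
            .iter()
            .flat_map(|&(lo, hi)| [lo, hi.saturating_add(1)])
            .filter(|&b| b > full.0 && b <= full.1)
            .collect();
        cuts.sort_unstable();
        cuts.dedup();

        // Turn the boundary points into consecutive sub-ranges covering `full`.
        let mut out = Vec::new();
        let mut lo = full.0;
        for cut in cuts {
            out.push((lo, cut - 1));
            lo = cut;
        }
        out.push((lo, full.1));
        out
    }

    fn main() {
        // Splitting 0..=255 against `0..=100` and `101..=200`
        // yields [(0, 100), (101, 200), (201, 255)].
        println!("{:?}", split_range((0, 255), &[(0, 100), (101, 200)]));
    }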

View File

@@ -120,8 +120,8 @@ pub fn visit_file_defs(
///
/// Note that, by default, rust-analyzer tests **do not** include core or std
/// libraries. If you are writing tests for functionality using [`FamousDefs`],
/// you'd want to include [minicore](test_utils::MiniCore) declaration at the
/// start of your tests:
/// you'd want to include minicore (see `test_utils::MiniCore`) declaration at
/// the start of your tests:
///
/// ```
/// //- minicore: iterator, ord, derive

View File

@@ -6,9 +6,9 @@
//! each submodule starts with `use super::*` import and exports
//! "public" productions via `pub(super)`.
//!
//! See docs for `Parser` to learn about API, available to the grammar,
//! and see docs for `Event` to learn how this actually manages to
//! produce parse trees.
//! See docs for [`Parser`](super::parser::Parser) to learn about API,
//! available to the grammar, and see docs for [`Event`](super::event::Event)
//! to learn how this actually manages to produce parse trees.
//!
//! Code in this module also contains inline tests, which start with
//! `// test name-of-the-test` comment and look like this:
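For concreteness, the inline test added further down in this very diff follows that convention: a `// test <name>` comment followed by the commented-out source of the test, which the test harness extracts and parses.

    // test half_open_range_pat
    // fn f() { let 0 .. = 1u32; }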

View File

@@ -65,14 +65,26 @@ fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
// match 92 {
// 0 ... 100 => (),
// 101 ..= 200 => (),
// 200 .. 301=> (),
// 200 .. 301 => (),
// 302 .. => (),
// }
// }
// FIXME: support half_open_range_patterns (`..=2`),
// exclusive_range_pattern (`..5`) with missing lhs
for &range_op in [T![...], T![..=], T![..]].iter() {
if p.at(range_op) {
let m = lhs.precede(p);
p.bump(range_op);
atom_pat(p, recovery_set);
// `0 .. =>` or `let 0 .. =`
// ^ ^
if p.at(T![=]) {
// test half_open_range_pat
// fn f() { let 0 .. = 1u32; }
} else {
atom_pat(p, recovery_set);
}
m.complete(p, RANGE_PAT);
return;
}
@@ -84,7 +96,7 @@ const PAT_RECOVERY_SET: TokenSet =
TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,], T![=]]);
fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
let m = match p.nth(0) {
let m = match p.current() {
T![box] => box_pat(p),
T![ref] | T![mut] => ident_pat(p, true),
T![const] => const_block_pat(p),
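For reference, a small sketch of the pattern forms this grammar change now parses, mirroring the updated test file further down. Note that exclusive range patterns such as `201..301` were still behind rustc's `exclusive_range_pattern` feature gate when this PR landed, so compiling this outside rust-analyzer's test fixtures needs a toolchain where that feature is available.

    fn classify(n: u32) -> &'static str {
        match n {
            0..=100 => "low",
            101..=200 => "mid",
            201..301 => "high", // exclusive range pattern
            301.. => "huge",    // half-open range pattern
        }
    }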

View File

@@ -1,17 +1,20 @@
//! The Rust parser.
//!
//! The parser doesn't know about concrete representation of tokens and syntax
//! trees. Abstract `TokenSource` and `TreeSink` traits are used instead. As a
//! consequence, this crates does not contain a lexer.
//! trees. Abstract [`TokenSource`] and [`TreeSink`] traits are used instead.
//! As a consequence, this crate does not contain a lexer.
//!
//! The `Parser` struct from the `parser` module is a cursor into the sequence
//! of tokens. Parsing routines use `Parser` to inspect current state and
//! advance the parsing.
//! The [`Parser`] struct from the [`parser`] module is a cursor into the
//! sequence of tokens. Parsing routines use [`Parser`] to inspect current
//! state and advance the parsing.
//!
//! The actual parsing happens in the `grammar` module.
//! The actual parsing happens in the [`grammar`] module.
//!
//! Tests for this crate live in `syntax` crate.
//! Tests for this crate live in the `syntax` crate.
//!
//! [`Parser`]: crate::parser::Parser
#![allow(rustdoc::private_intra_doc_links)]
#[macro_use]
mod token_set;
#[macro_use]

View File

@@ -14,7 +14,7 @@ use crate::{
/// `Parser` struct provides the low-level API for
/// navigating through the stream of tokens and
/// constructing the parse tree. The actual parsing
/// happens in the `grammar` module.
/// happens in the [`grammar`](super::grammar) module.
///
/// However, the result of this `Parser` is not a real
/// tree, but rather a flat stream of events of the form
@@ -262,7 +262,7 @@ impl<'t> Parser<'t> {
}
}
/// See `Parser::start`.
/// See [`Parser::start`].
pub(crate) struct Marker {
pos: u32,
bomb: DropBomb,
@@ -320,7 +320,8 @@ impl CompletedMarker {
/// node `A`, then complete it, and then after parsing the
/// whole `A`, decide that it should have started some node
/// `B` before starting `A`. `precede` allows to do exactly
/// that. See also docs about `forward_parent` in `Event::Start`.
/// that. See also docs about
/// [`Event::Start::forward_parent`](crate::event::Event::Start::forward_parent).
///
/// Given completed events `[START, FINISH]` and its corresponding
/// `CompletedMarker(pos: 0, _)`.
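The range-pattern change earlier in this diff is a concrete use of `precede`: `atom_pat` completes a node for the left-hand literal, and only on seeing a range operator does the parser learn that node should have been the first child of a `RANGE_PAT`. A condensed sketch follows; it assumes the crate-internal `Parser`, `TokenSet`, `CompletedMarker`, and `T!` items, so it is only meaningful inside the parser crate.

    fn range_pat_sketch(p: &mut Parser, recovery_set: TokenSet) {
        // `atom_pat` has already produced a completed node, e.g. LITERAL_PAT for `200`.
        if let Some(lhs) = atom_pat(p, recovery_set) {
            if p.at(T![..]) {
                // Retroactively open a parent node before the completed one;
                // this is recorded via `forward_parent` on the start event.
                let m = lhs.precede(p);
                p.bump(T![..]);
                atom_pat(p, recovery_set); // right-hand side, may be absent
                m.complete(p, RANGE_PAT);
            }
        }
    }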

View File

@@ -197,12 +197,12 @@ pub enum AttrKind {
}
impl AttrKind {
/// Returns `true` if the attr_kind is [`Inner`].
/// Returns `true` if the attr_kind is [`Inner`](Self::Inner).
pub fn is_inner(&self) -> bool {
matches!(self, Self::Inner)
}
/// Returns `true` if the attr_kind is [`Outer`].
/// Returns `true` if the attr_kind is [`Outer`](Self::Outer).
pub fn is_outer(&self) -> bool {
matches!(self, Self::Outer)
}

View File

@@ -1,5 +1,5 @@
SOURCE_FILE@0..112
FN@0..111
SOURCE_FILE@0..135
FN@0..134
FN_KW@0..2 "fn"
WHITESPACE@2..3 " "
NAME@3..7
@@ -8,16 +8,16 @@ SOURCE_FILE@0..112
L_PAREN@7..8 "("
R_PAREN@8..9 ")"
WHITESPACE@9..10 " "
BLOCK_EXPR@10..111
BLOCK_EXPR@10..134
L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n "
MATCH_EXPR@16..109
MATCH_EXPR@16..132
MATCH_KW@16..21 "match"
WHITESPACE@21..22 " "
LITERAL@22..24
INT_NUMBER@22..24 "92"
WHITESPACE@24..25 " "
MATCH_ARM_LIST@25..109
MATCH_ARM_LIST@25..132
L_CURLY@25..26 "{"
WHITESPACE@26..35 "\n "
MATCH_ARM@35..51
@@ -58,7 +58,7 @@ SOURCE_FILE@0..112
R_PAREN@76..77 ")"
COMMA@77..78 ","
WHITESPACE@78..87 "\n "
MATCH_ARM@87..103
MATCH_ARM@87..104
RANGE_PAT@87..97
LITERAL_PAT@87..90
LITERAL@87..90
@@ -69,14 +69,30 @@ SOURCE_FILE@0..112
LITERAL_PAT@94..97
LITERAL@94..97
INT_NUMBER@94..97 "301"
FAT_ARROW@97..99 "=>"
WHITESPACE@99..100 " "
TUPLE_EXPR@100..102
L_PAREN@100..101 "("
R_PAREN@101..102 ")"
COMMA@102..103 ","
WHITESPACE@103..108 "\n "
R_CURLY@108..109 "}"
WHITESPACE@109..110 "\n"
R_CURLY@110..111 "}"
WHITESPACE@111..112 "\n"
WHITESPACE@97..98 " "
FAT_ARROW@98..100 "=>"
WHITESPACE@100..101 " "
TUPLE_EXPR@101..103
L_PAREN@101..102 "("
R_PAREN@102..103 ")"
COMMA@103..104 ","
WHITESPACE@104..113 "\n "
MATCH_ARM@113..126
RANGE_PAT@113..119
LITERAL_PAT@113..116
LITERAL@113..116
INT_NUMBER@113..116 "302"
WHITESPACE@116..117 " "
DOT2@117..119 ".."
WHITESPACE@119..120 " "
FAT_ARROW@120..122 "=>"
WHITESPACE@122..123 " "
TUPLE_EXPR@123..125
L_PAREN@123..124 "("
R_PAREN@124..125 ")"
COMMA@125..126 ","
WHITESPACE@126..131 "\n "
R_CURLY@131..132 "}"
WHITESPACE@132..133 "\n"
R_CURLY@133..134 "}"
WHITESPACE@134..135 "\n"

View File

@@ -2,6 +2,7 @@ fn main() {
match 92 {
0 ... 100 => (),
101 ..= 200 => (),
200 .. 301=> (),
200 .. 301 => (),
302 .. => (),
}
}

View File

@@ -0,0 +1,31 @@
SOURCE_FILE@0..28
FN@0..27
FN_KW@0..2 "fn"
WHITESPACE@2..3 " "
NAME@3..4
IDENT@3..4 "f"
PARAM_LIST@4..6
L_PAREN@4..5 "("
R_PAREN@5..6 ")"
WHITESPACE@6..7 " "
BLOCK_EXPR@7..27
L_CURLY@7..8 "{"
WHITESPACE@8..9 " "
LET_STMT@9..25
LET_KW@9..12 "let"
WHITESPACE@12..13 " "
RANGE_PAT@13..17
LITERAL_PAT@13..14
LITERAL@13..14
INT_NUMBER@13..14 "0"
WHITESPACE@14..15 " "
DOT2@15..17 ".."
WHITESPACE@17..18 " "
EQ@18..19 "="
WHITESPACE@19..20 " "
LITERAL@20..24
INT_NUMBER@20..24 "1u32"
SEMICOLON@24..25 ";"
WHITESPACE@25..26 " "
R_CURLY@26..27 "}"
WHITESPACE@27..28 "\n"

View File

@@ -0,0 +1 @@
fn f() { let 0 .. = 1u32; }