Skip to content

libsyntax: small refactorings #51945

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 6 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
100 changes: 33 additions & 67 deletions src/libsyntax/parse/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1029,6 +1029,13 @@ impl<'a> Parser<'a> {
}
}

/// Returns `true` if the current token matches any token in `kets`.
///
/// In `Expect` mode the comparison goes through `self.check`, which
/// presumably also records the token in the expected set for diagnostics
/// (TODO confirm against `check`'s definition); in `NoExpect` mode the
/// token is compared directly without touching diagnostic state.
fn check_tokens(&mut self, kets: &[&token::Token], expect: TokenExpectType) -> bool {
    // The mode is invariant across the scan, so branch once instead of
    // re-matching `expect` for every candidate token.
    match expect {
        TokenExpectType::Expect => kets.iter().any(|k| self.check(k)),
        TokenExpectType::NoExpect => kets.iter().any(|k| self.token == **k),
    }
}

/// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
Expand Down Expand Up @@ -1080,12 +1087,7 @@ impl<'a> Parser<'a> {
{
let mut first: bool = true;
let mut v = vec![];
while !kets.iter().any(|k| {
match expect {
TokenExpectType::Expect => self.check(k),
TokenExpectType::NoExpect => self.token == **k,
}
}) {
while !self.check_tokens(kets, expect) {
match self.token {
token::CloseDelim(..) | token::Eof => break,
_ => {}
Expand Down Expand Up @@ -1116,12 +1118,7 @@ impl<'a> Parser<'a> {
}
}
}
if sep.trailing_sep_allowed && kets.iter().any(|k| {
match expect {
TokenExpectType::Expect => self.check(k),
TokenExpectType::NoExpect => self.token == **k,
}
}) {
if sep.trailing_sep_allowed && self.check_tokens(kets, expect) {
break;
}

Expand All @@ -1145,9 +1142,7 @@ impl<'a> Parser<'a> {
{
self.expect(bra)?;
let result = self.parse_seq_to_before_end(ket, sep, f)?;
if self.token == *ket {
self.bump();
}
self.eat(ket);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should change diagnostics by adding `ket` to the expected-token set (since `eat` goes through `check`), but that looks like a desirable change in this case.

Ok(result)
}

Expand Down Expand Up @@ -1343,8 +1338,7 @@ impl<'a> Parser<'a> {
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
let default = if self.check(&token::Eq) {
self.bump();
let default = if self.eat(&token::Eq) {
let expr = self.parse_expr()?;
self.expect(&token::Semi)?;
Some(expr)
Expand Down Expand Up @@ -1829,8 +1823,7 @@ impl<'a> Parser<'a> {
} else if self.eat_keyword(keywords::False) {
LitKind::Bool(false)
} else {
let lit = self.parse_lit_token()?;
lit
self.parse_lit_token()?
};
Ok(codemap::Spanned { node: lit, span: lo.to(self.prev_span) })
}
Expand All @@ -1841,10 +1834,8 @@ impl<'a> Parser<'a> {

let minus_lo = self.span;
let minus_present = self.eat(&token::BinOp(token::Minus));
let lo = self.span;
let literal = P(self.parse_lit()?);
let hi = self.prev_span;
let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new());
let expr = self.mk_expr(literal.span, ExprKind::Lit(literal), ThinVec::new());

if minus_present {
let minus_hi = self.prev_span;
Expand Down Expand Up @@ -1940,17 +1931,7 @@ impl<'a> Parser<'a> {
/// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat.
/// This is used when parsing derive macro paths in `#[derive]` attributes.
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
let meta_ident = match self.token {
token::Interpolated(ref nt) => match nt.0 {
token::NtMeta(ref meta) => match meta.node {
ast::MetaItemKind::Word => Some(meta.ident.clone()),
_ => None,
},
_ => None,
},
_ => None,
};
if let Some(path) = meta_ident {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Outlining a small function that's used only once doesn't look like a good style to me.
I'd remove this commit.

if let Some(path) = self.token.to_meta_ident() {
self.bump();
return Ok(path);
}
Expand All @@ -1975,20 +1956,16 @@ impl<'a> Parser<'a> {
-> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;

let is_args_start = |token: &token::Token| match *token {
token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) => true,
_ => false,
};
let check_args_start = |this: &mut Self| {
this.expected_tokens.extend_from_slice(
&[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))]
);
is_args_start(&this.token)
this.token.is_args_start()
};

Ok(if style == PathStyle::Type && check_args_start(self) ||
style != PathStyle::Mod && self.check(&token::ModSep)
&& self.look_ahead(1, |t| is_args_start(t)) {
&& self.look_ahead(1, |t| t.is_args_start()) {
// Generic arguments are found - `<`, `(`, `::<` or `::(`.
let lo = self.span;
if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning {
Expand Down Expand Up @@ -2203,10 +2180,8 @@ impl<'a> Parser<'a> {
while self.token != token::CloseDelim(token::Paren) {
es.push(self.parse_expr()?);
self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
if self.check(&token::Comma) {
if self.eat(&token::Comma) {
trailing_comma = true;

self.bump();
} else {
trailing_comma = false;
break;
Expand All @@ -2232,25 +2207,22 @@ impl<'a> Parser<'a> {

attrs.extend(self.parse_inner_attributes()?);

if self.check(&token::CloseDelim(token::Bracket)) {
if self.eat(&token::CloseDelim(token::Bracket)) {
// Empty vector.
self.bump();
ex = ExprKind::Array(Vec::new());
} else {
// Nonempty vector.
let first_expr = self.parse_expr()?;
if self.check(&token::Semi) {
if self.eat(&token::Semi) {
// Repeating array syntax: [ 0; 512 ]
self.bump();
let count = AnonConst {
id: ast::DUMMY_NODE_ID,
value: self.parse_expr()?,
};
self.expect(&token::CloseDelim(token::Bracket))?;
ex = ExprKind::Repeat(first_expr, count);
} else if self.check(&token::Comma) {
} else if self.eat(&token::Comma) {
// Vector with two or more elements.
self.bump();
let remaining_exprs = self.parse_seq_to_end(
&token::CloseDelim(token::Bracket),
SeqSep::trailing_allowed(token::Comma),
Expand Down Expand Up @@ -2484,14 +2456,13 @@ impl<'a> Parser<'a> {
}
}

match self.expect_one_of(&[token::Comma],
&[token::CloseDelim(token::Brace)]) {
Ok(()) => {}
Err(mut e) => {
e.emit();
self.recover_stmt();
break;
}
if let Err(mut e) = self.expect_one_of(
&[token::Comma],
&[token::CloseDelim(token::Brace)],
) {
e.emit();
self.recover_stmt();
break;
}
}

Expand Down Expand Up @@ -2873,7 +2844,7 @@ impl<'a> Parser<'a> {
return self.parse_dot_or_call_expr(Some(attrs));
}
}
_ => { return self.parse_dot_or_call_expr(Some(attrs)); }
_ => return self.parse_dot_or_call_expr(Some(attrs)),
};
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
Expand Down Expand Up @@ -2991,7 +2962,7 @@ impl<'a> Parser<'a> {
RangeLimits::Closed
};

let r = try!(self.mk_range(Some(lhs), rhs, limits));
let r = self.mk_range(Some(lhs), rhs, limits)?;
lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new());
break
}
Expand Down Expand Up @@ -3199,9 +3170,7 @@ impl<'a> Parser<'a> {
RangeLimits::Closed
};

let r = try!(self.mk_range(None,
opt_end,
limits));
let r = self.mk_range(None, opt_end, limits)?;
Ok(self.mk_expr(lo.to(hi), r, attrs))
}

Expand Down Expand Up @@ -3553,8 +3522,7 @@ impl<'a> Parser<'a> {

/// Parse the RHS of a local variable declaration (e.g. '= 14;')
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
if self.check(&token::Eq) {
self.bump();
if self.eat(&token::Eq) {
Ok(Some(self.parse_expr()?))
} else if skip_eq {
Ok(Some(self.parse_expr()?))
Expand All @@ -3580,8 +3548,7 @@ impl<'a> Parser<'a> {
);
err.emit();
self.bump();
} else if self.check(&token::BinOp(token::Or)) {
self.bump();
} else if self.eat(&token::BinOp(token::Or)) {
} else {
return Ok(pats);
}
Expand Down Expand Up @@ -6178,8 +6145,7 @@ impl<'a> Parser<'a> {

let id_span = self.span;
let id = self.parse_ident()?;
if self.check(&token::Semi) {
self.bump();
if self.eat(&token::Semi) {
if in_cfg && self.recurse_into_file_modules {
// This mod is in an external file. Let's go get it!
let ModulePathSuccess { path, directory_ownership, warn } =
Expand Down
17 changes: 16 additions & 1 deletion src/libsyntax/parse/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ pub use self::DelimToken::*;
pub use self::Lit::*;
pub use self::Token::*;

use ast::{self};
use ast;
use parse::ParseSess;
use print::pprust;
use ptr::P;
Expand Down Expand Up @@ -224,6 +224,17 @@ impl Token {
}
}

/// If this token is an interpolated `NtMeta` whose meta item is a bare
/// word (e.g. the `foo` in `#[derive(foo)]`), returns a clone of its path;
/// any other token shape yields `None`.
crate fn to_meta_ident(&self) -> Option<ast::Path> {
    match *self {
        Interpolated(ref nt) => match nt.0 {
            NtMeta(ref meta) => match meta.node {
                // Only word-style meta items (no arguments) map to a path.
                ast::MetaItemKind::Word => Some(meta.ident.clone()),
                _ => None,
            },
            _ => None,
        },
        _ => None,
    }
}

/// Returns `true` if the token can appear at the start of an expression.
crate fn can_begin_expr(&self) -> bool {
match *self {
Expand Down Expand Up @@ -365,6 +376,10 @@ impl Token {
self == &Lt || self == &BinOp(Shl)
}

crate fn is_args_start(&self) -> bool {
self.is_qpath_start() || self == &OpenDelim(Paren)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Well, the tokens technically coincide with the is_qpath_start set, but </<< in Type<T>/Type<<X as Y>::Assoc> is not a qpath start at all.
I'd remove this commit.

}

crate fn is_path_start(&self) -> bool {
self == &ModSep || self.is_qpath_start() || self.is_path() ||
self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
Expand Down