
Commit 999c0fc

Don’t cache skipped tokens
1 parent 336a12e commit 999c0fc
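
In brief: `ParserInput` keeps a single-entry token cache (`cached_token`), keyed by the token's start position, so that re-parsing from a saved position (as a speculative parse via `Parser::try` does after failing) can avoid re-tokenizing. Previously, `next()` and `next_including_whitespace()` looped over `next_including_whitespace_and_comments()`, which wrote every token it produced into that cache; skipped whitespace and comments therefore evicted the one meaningful token worth keeping, and a rewind re-tokenized everything. With this commit, a token enters the cache only when it is actually returned to the caller. Below is a minimal standalone model of the new policy, assuming nothing from the crate; `Tok`, `Cached`, and `MiniTokenizer` are invented stand-ins for the real `Token`, `CachedToken`, and `Tokenizer`:

```rust
// Minimal, self-contained model of the single-entry token cache. All names
// here (Tok, Cached, MiniTokenizer) are invented for illustration and are
// NOT the crate's real Token / CachedToken / Tokenizer types.

#[derive(Clone, Debug, PartialEq)]
enum Tok {
    WhiteSpace,
    Ident(String),
}

struct Cached {
    start: usize,
    end: usize,
    token: Tok,
}

struct MiniTokenizer {
    // Pre-lexed (start, end, token) triples stand in for real tokenization.
    tokens: Vec<(usize, usize, Tok)>,
    pos: usize,
    cache: Option<Cached>,
    lexed: usize, // counts how many tokens were (re-)lexed, i.e. cache misses
}

impl MiniTokenizer {
    // `next` under the *new* policy: skipped whitespace is never written to
    // the cache, so it cannot evict a meaningful token.
    fn next_skipping_whitespace(&mut self) -> Option<Tok> {
        loop {
            let here = self.pos;
            // Cache hit: reuse the token lexed at this position earlier.
            if let Some(c) = &self.cache {
                if c.start == here {
                    self.pos = c.end;
                    if c.token != Tok::WhiteSpace {
                        return Some(c.token.clone());
                    }
                    continue;
                }
            }
            // Cache miss: "lex" the token starting at `here`.
            let (start, end, tok) = self.tokens.iter().find(|t| t.0 == here)?.clone();
            self.lexed += 1;
            self.pos = end;
            if tok != Tok::WhiteSpace {
                self.cache = Some(Cached { start, end, token: tok.clone() });
                return Some(tok);
            }
            // The old policy cached the whitespace here as well, evicting the
            // previously cached meaningful token; the new one does not.
        }
    }
}

fn main() {
    let mut t = MiniTokenizer {
        tokens: vec![(0, 1, Tok::WhiteSpace), (1, 5, Tok::Ident("blue".into()))],
        pos: 0,
        cache: None,
        lexed: 0,
    };
    assert_eq!(t.next_skipping_whitespace(), Some(Tok::Ident("blue".into())));
    // Rewind and retry, as a failed speculative parse would:
    t.pos = 0;
    assert_eq!(t.next_skipping_whitespace(), Some(Tok::Ident("blue".into())));
    // The whitespace was lexed twice, but the ident only once; under the old
    // policy the ident would have been lexed twice as well (4 in total).
    assert_eq!(t.lexed, 3);
}
```

Note that the cache-hit branch still has to re-check whether the cached token should be skipped: a whitespace token can legitimately sit in the cache after being returned by `next_including_whitespace`, and a later `next()` at the same position must step past it without returning it. The real `next_common` in the diff below does the same.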

File tree: 1 file changed, +45 -40 lines

src/parser.rs (45 additions, 40 deletions)

@@ -80,11 +80,6 @@ impl<'i> ParserInput<'i> {
             cached_token: None,
         }
     }
-
-    #[inline]
-    fn cached_token_ref(&self) -> &Token<'i> {
-        &self.cached_token.as_ref().unwrap().token
-    }
 }
 
 /// A CSS parser that borrows its `&str` input,
@@ -332,27 +327,15 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// See the `Parser::parse_nested_block` method to parse the content of functions or blocks.
     ///
     /// This only returns a closing token when it is unmatched (and therefore an error).
+    #[inline]
     pub fn next(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
-        loop {
-            match self.next_including_whitespace_and_comments() {
-                Err(e) => return Err(e),
-                Ok(&Token::WhiteSpace(_)) | Ok(&Token::Comment(_)) => {},
-                _ => break
-            }
-        }
-        Ok(self.input.cached_token_ref())
+        self.next_common(true, true)
     }
 
     /// Same as `Parser::next`, but does not skip whitespace tokens.
+    #[inline]
     pub fn next_including_whitespace(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
-        loop {
-            match self.next_including_whitespace_and_comments() {
-                Err(e) => return Err(e),
-                Ok(&Token::Comment(_)) => {},
-                _ => break
-            }
-        }
-        Ok(self.input.cached_token_ref())
+        self.next_common(false, true)
     }
 
     /// Same as `Parser::next`, but does not skip whitespace or comment tokens.
@@ -361,37 +344,59 @@ impl<'i: 't, 't> Parser<'i, 't> {
     /// where comments are preserved.
     /// When parsing higher-level values, per the CSS Syntax specification,
     /// comments should always be ignored between tokens.
+    #[inline]
     pub fn next_including_whitespace_and_comments(&mut self) -> Result<&Token<'i>, BasicParseError<'i>> {
+        self.next_common(false, false)
+    }
+
+    fn next_common(&mut self, skip_whitespace: bool, skip_comments: bool)
+                   -> Result<&Token<'i>, BasicParseError<'i>> {
+        let return_this_token = |token: &Token| {
+            match *token {
+                Token::WhiteSpace(_) if skip_whitespace => false,
+                Token::Comment(_) if skip_comments => false,
+                _ => true
+            }
+        };
+
         if let Some(block_type) = self.at_start_of.take() {
             consume_until_end_of_block(block_type, &mut self.input.tokenizer);
         }
 
-        let byte = self.input.tokenizer.next_byte();
-        if self.stop_before.contains(Delimiters::from_byte(byte)) {
-            return Err(BasicParseError::EndOfInput)
-        }
-
-        let token_start_position = self.input.tokenizer.position();
-        let token;
-        match self.input.cached_token {
-            Some(ref cached_token) if cached_token.start_position == token_start_position => {
-                self.input.tokenizer.reset(cached_token.end_position);
-                token = &cached_token.token
+        loop {
+            let byte = self.input.tokenizer.next_byte();
+            if self.stop_before.contains(Delimiters::from_byte(byte)) {
+                return Err(BasicParseError::EndOfInput)
             }
-            _ => {
-                let new_token = self.input.tokenizer.next().map_err(|()| BasicParseError::EndOfInput)?;
-                self.input.cached_token = Some(CachedToken {
-                    token: new_token,
-                    start_position: token_start_position,
-                    end_position: self.input.tokenizer.position(),
-                });
-                token = self.input.cached_token_ref()
+
+            let token_start_position = self.input.tokenizer.position();
+            match self.input.cached_token {
+                Some(ref cached_token) if cached_token.start_position == token_start_position => {
+                    self.input.tokenizer.reset(cached_token.end_position);
+                    if return_this_token(&cached_token.token) {
+                        break
+                    }
+                }
+                _ => {
+                    let new_token = self.input.tokenizer.next().map_err(|()| BasicParseError::EndOfInput)?;
+                    if return_this_token(&new_token) {
+                        self.input.cached_token = Some(CachedToken {
+                            token: new_token,
+                            start_position: token_start_position,
+                            end_position: self.input.tokenizer.position(),
+                        });
+                        break
+                    }
+                }
            }
         }
 
+        let token = &self.input.cached_token.as_ref().unwrap().token;
+
         if let Some(block_type) = BlockType::opening(token) {
             self.at_start_of = Some(block_type);
         }
+
         Ok(token)
     }
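
After the change, all three public token getters funnel into `next_common`, differing only in the two skip flags: `next` passes `(true, true)`, `next_including_whitespace` passes `(false, true)`, and `next_including_whitespace_and_comments` passes `(false, false)`; the now-unused `cached_token_ref` helper is deleted. A caller-side sketch follows, assuming the crate's public API of this era (`ParserInput::new`, `Parser::new`, `Token::Ident`); exact types vary between cssparser versions:

```rust
extern crate cssparser;

use cssparser::{Parser, ParserInput, Token};

fn main() {
    // Assumes a cssparser version with the ParserInput type (0.19-era API).
    let mut input = ParserInput::new("/* comment */  blue");
    let mut parser = Parser::new(&mut input);

    // `next` skips both the comment and the whitespace and yields the ident;
    // with this commit, neither skipped token overwrites the cached ident.
    match parser.next() {
        Ok(&Token::Ident(ref name)) => println!("ident: {:?}", name),
        other => println!("unexpected: {:?}", other),
    }
}
```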
