diff --git a/src/cow_rc_str.rs b/src/cow_rc_str.rs index 26508481..03631f47 100644 --- a/src/cow_rc_str.rs +++ b/src/cow_rc_str.rs @@ -51,7 +51,7 @@ impl<'a> From<&'a str> for CowRcStr<'a> { } } -impl<'a> From<String> for CowRcStr<'a> { +impl From<String> for CowRcStr<'_> { #[inline] fn from(s: String) -> Self { CowRcStr::from_rc(Rc::new(s)) @@ -84,7 +84,7 @@ impl<'a> CowRcStr<'a> { } } -impl<'a> Clone for CowRcStr<'a> { +impl Clone for CowRcStr<'_> { #[inline] fn clone(&self) -> Self { match self.unpack() { @@ -99,7 +99,7 @@ impl<'a> Clone for CowRcStr<'a> { } } -impl<'a> Drop for CowRcStr<'a> { +impl Drop for CowRcStr<'_> { #[inline] fn drop(&mut self) { if let Err(ptr) = self.unpack() { @@ -108,7 +108,7 @@ impl<'a> Drop for CowRcStr<'a> { } } -impl<'a> ops::Deref for CowRcStr<'a> { +impl ops::Deref for CowRcStr<'_> { type Target = str; #[inline] @@ -119,65 +119,65 @@ impl<'a> ops::Deref for CowRcStr<'a> { // Boilerplate / trivial impls below. -impl<'a> AsRef<str> for CowRcStr<'a> { +impl AsRef<str> for CowRcStr<'_> { #[inline] fn as_ref(&self) -> &str { self } } -impl<'a> Borrow<str> for CowRcStr<'a> { +impl Borrow<str> for CowRcStr<'_> { #[inline] fn borrow(&self) -> &str { self } } -impl<'a> Default for CowRcStr<'a> { +impl Default for CowRcStr<'_> { #[inline] fn default() -> Self { Self::from("") } } -impl<'a> hash::Hash for CowRcStr<'a> { +impl hash::Hash for CowRcStr<'_> { #[inline] fn hash<H: hash::Hasher>(&self, hasher: &mut H) { str::hash(self, hasher) } } -impl<'a, T: AsRef<str>> PartialEq<T> for CowRcStr<'a> { +impl<T: AsRef<str>> PartialEq<T> for CowRcStr<'_> { #[inline] fn eq(&self, other: &T) -> bool { str::eq(self, other.as_ref()) } } -impl<'a, T: AsRef<str>> PartialOrd<T> for CowRcStr<'a> { +impl<T: AsRef<str>> PartialOrd<T> for CowRcStr<'_> { #[inline] fn partial_cmp(&self, other: &T) -> Option<cmp::Ordering> { str::partial_cmp(self, other.as_ref()) } } -impl<'a> Eq for CowRcStr<'a> {} +impl Eq for CowRcStr<'_> {} -impl<'a> Ord for CowRcStr<'a> { +impl Ord for CowRcStr<'_> { #[inline] fn cmp(&self, other: &Self) -> cmp::Ordering { str::cmp(self, other) } } 
-impl<'a> fmt::Display for CowRcStr<'a> { +impl fmt::Display for CowRcStr<'_> { #[inline] fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { str::fmt(self, formatter) } } -impl<'a> fmt::Debug for CowRcStr<'a> { +impl fmt::Debug for CowRcStr<'_> { #[inline] fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { str::fmt(self, formatter) diff --git a/src/parser.rs b/src/parser.rs index dd35fc50..0a432912 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -76,7 +76,7 @@ pub enum BasicParseErrorKind<'i> { QualifiedRuleInvalid, } -impl<'i> fmt::Display for BasicParseErrorKind<'i> { +impl fmt::Display for BasicParseErrorKind<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { BasicParseErrorKind::UnexpectedToken(token) => { @@ -176,7 +176,7 @@ impl<'i, T> ParseErrorKind<'i, T> { } } -impl<'i, E: fmt::Display> fmt::Display for ParseErrorKind<'i, E> { +impl<E: fmt::Display> fmt::Display for ParseErrorKind<'_, E> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ParseErrorKind::Basic(ref basic) => basic.fmt(f), @@ -218,13 +218,13 @@ impl<'i, T> ParseError<'i, T> { } } -impl<'i, E: fmt::Display> fmt::Display for ParseError<'i, E> { +impl<E: fmt::Display> fmt::Display for ParseError<'_, E> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.kind.fmt(f) } } -impl<'i, E: fmt::Display + fmt::Debug> std::error::Error for ParseError<'i, E> {} +impl<E: fmt::Display + fmt::Debug> std::error::Error for ParseError<'_, E> {} /// The owned input for a parser. 
pub struct ParserInput<'i> { diff --git a/src/rules_and_declarations.rs b/src/rules_and_declarations.rs index 48da02b5..188e354e 100644 --- a/src/rules_and_declarations.rs +++ b/src/rules_and_declarations.rs @@ -241,7 +241,7 @@ impl<'i, 't, 'a, P, I, E> RuleBodyParser<'i, 't, 'a, P, I, E> { } /// https://drafts.csswg.org/css-syntax/#consume-a-blocks-contents -impl<'i, 't, 'a, I, P, E: 'i> Iterator for RuleBodyParser<'i, 't, 'a, P, I, E> +impl<'i, I, P, E: 'i> Iterator for RuleBodyParser<'i, '_, '_, P, I, E> where P: RuleBodyItemParser<'i, I, E>, { @@ -349,7 +349,7 @@ where } /// `RuleListParser` is an iterator that yields `Ok(_)` for a rule or an `Err(..)` for an invalid one. -impl<'i, 't, 'a, R, P, E: 'i> Iterator for StyleSheetParser<'i, 't, 'a, P> +impl<'i, R, P, E: 'i> Iterator for StyleSheetParser<'i, '_, '_, P> where P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E> + AtRuleParser<'i, AtRule = R, Error = E>, diff --git a/src/serializer.rs b/src/serializer.rs index 3c6e31cb..5df73954 100644 --- a/src/serializer.rs +++ b/src/serializer.rs @@ -55,7 +55,7 @@ where Ok(()) } -impl<'a> ToCss for Token<'a> { +impl ToCss for Token<'_> { fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write, { @@ -311,7 +311,7 @@ where } } -impl<'a, W> fmt::Write for CssStringWriter<'a, W> +impl<W> fmt::Write for CssStringWriter<'_, W> where W: fmt::Write, { @@ -539,7 +539,7 @@ impl TokenSerializationType { } } -impl<'a> Token<'a> { +impl Token<'_> { /// Categorize a token into a type that determines when `/**/` needs to be inserted /// between two tokens when serialized next to each other without whitespace in between. 
/// diff --git a/src/tests.rs b/src/tests.rs index 7389664d..ec0fc517 100644 --- a/src/tests.rs +++ b/src/tests.rs @@ -773,7 +773,7 @@ where } } -impl<'a> ToJson for CowRcStr<'a> { +impl ToJson for CowRcStr<'_> { fn to_json(&self) -> Value { let s: &str = self; s.to_json() @@ -946,7 +946,7 @@ impl<'i> QualifiedRuleParser<'i> for JsonParser { } } -impl<'i> RuleBodyItemParser<'i, Value, ()> for JsonParser { +impl RuleBodyItemParser<'_, Value, ()> for JsonParser { fn parse_qualified(&self) -> bool { true } diff --git a/src/tokenizer.rs b/src/tokenizer.rs index ea173a5e..2e86c66a 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -190,7 +190,7 @@ pub enum Token<'a> { CloseCurlyBracket, } -impl<'a> Token<'a> { +impl Token<'_> { /// Return whether this token represents a parse error. /// /// `BadUrl` and `BadString` are tokenizer-level parse errors. @@ -324,11 +324,11 @@ impl<'a> Tokenizer<'a> { let current = self.position(); let start = self .slice(SourcePosition(0)..current) - .rfind(|c| matches!(c, '\r' | '\n' | '\x0C')) + .rfind(['\r', '\n', '\x0C']) .map_or(0, |start| start + 1); let end = self .slice(current..SourcePosition(self.input.len())) - .find(|c| matches!(c, '\r' | '\n' | '\x0C')) + .find(['\r', '\n', '\x0C']) .map_or(self.input.len(), |end| current.0 + end); self.slice(SourcePosition(start)..SourcePosition(end)) } @@ -720,9 +720,7 @@ fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) { // If there is a source map directive, extract the URL. 
if contents.starts_with(directive) || contents.starts_with(directive_old) { let contents = &contents[directive.len()..]; - tokenizer.source_map_url = contents - .split(|c| c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n') - .next() + tokenizer.source_map_url = contents.split([' ', '\t', '\x0C', '\r', '\n']).next(); } let directive = "# sourceURL="; @@ -731,9 +729,7 @@ fn check_for_source_map<'a>(tokenizer: &mut Tokenizer<'a>, contents: &'a str) { // If there is a source map directive, extract the URL. if contents.starts_with(directive) || contents.starts_with(directive_old) { let contents = &contents[directive.len()..]; - tokenizer.source_url = contents - .split(|c| c == ' ' || c == '\t' || c == '\x0C' || c == '\r' || c == '\n') - .next() + tokenizer.source_url = contents.split([' ', '\t', '\x0C', '\r', '\n']).next() } }