diff --git a/Cargo.toml b/Cargo.toml index 4cec9a3..04b8e7c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,11 +17,15 @@ regex = "1" pest = "2.7.15" pest_derive = "2.7.15" thiserror = "2.0.9" +jsonpath-rust-impl = {path = "jsonpath-rust-impl", optional = true} +jsonpath-ast = {path = "jsonpath-ast"} [dev-dependencies] serde = { version = "1.0", features = ["derive"] } criterion = "0.5.1" +[features] +compiled-path = ["jsonpath-ast/compiled-path", "dep:jsonpath-rust-impl"] [[bench]] name = "regex" diff --git a/README.md b/README.md index bb713bb..8d2f709 100644 --- a/README.md +++ b/README.md @@ -251,6 +251,27 @@ The path is supported with the limited elements namely only the elements with th } ``` +### Compiled Paths +🚧Under Construction: Unstable/Unimplemented🚧 + +By enabling the `compiled-path` feature, the following syntax becomes available: +```rust +fn macros() { + // Existing + let vec = js_path("$.values[?match(@, $.regex)]", &json)?; + // New + let q_ast: JpQuery = ::jsonpath_rust::json_query!($.values[?match(@, $.regex)]); +} +``` + +This allows for query strings to be created infallibly at compile time for applications where query strings will be static strings in source code. + +#### Limitations Of Compiled Path Queries +- Single quote strings are not allowed, however to replace this, rust's [raw string literals](https://doc.rust-lang.org/rust-by-example/std/str.html) such as `r#" ... "#` can be used. +- The macro does not check whitespace, this means that with respect to whitespace, the domain of strings accepted by the macro is a superset of those accepted by the original RFC. 
+- Due to [constraints on rust identifiers](https://internals.rust-lang.org/t/supporting-emoji-in-identifiers/16838), emoji in member name shorthands such as `json_query!( $.☺ )` are not allowed + - Unicode characters still work in both string literals and bracket field access, ie: `json_query!( $["☺"] )` + ### Python bindings Python bindings ([jsonpath-rust-bindings](https://github.com/night-crawler/jsonpath-rust-bindings)) are available on diff --git a/jsonpath-ast/Cargo.toml b/jsonpath-ast/Cargo.toml new file mode 100644 index 0000000..121d6ac --- /dev/null +++ b/jsonpath-ast/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "jsonpath-ast" +version = "0.1.0" +edition = "2024" + +[dependencies] +proc-macro2 = { version = "1.0.95", features = ["span-locations"] } +pest = "2.7.15" +pest_derive = "2.7.15" +syn = { version = "2.0.101", features = ["default", "extra-traits"] } +pest-ast = "0.3.5" +from-pest = "0.3.3" +syn_derive = { version = "0.2.0", optional = true } +quote = "1.0.40" +derive-new = "0.7.0" + +[features] +compiled-path = ["dep:syn_derive"] diff --git a/jsonpath-ast/src/ast.rs b/jsonpath-ast/src/ast.rs new file mode 100644 index 0000000..af316b7 --- /dev/null +++ b/jsonpath-ast/src/ast.rs @@ -0,0 +1,888 @@ +pub mod parse { + use pest_derive::Parser; + #[derive(Parser)] + #[grammar = "../../jsonpath-rust/src/parser/grammar/json_path_9535.pest"] + pub struct JSPathParser; +} + +pub(crate) mod kw { + // syn::custom_keyword!(in); + syn::custom_keyword!(nin); + syn::custom_keyword!(size); + syn::custom_keyword!(none_of); + syn::custom_keyword!(any_of); + syn::custom_keyword!(subset_of); + + syn::custom_keyword!(length); + syn::custom_keyword!(value); + syn::custom_keyword!(count); + syn::custom_keyword!(search); + // reserved + // syn::custom_keyword!(match); + + syn::custom_keyword!(null); +} + +macro_rules! 
terminating_from_pest { + ($wrap:ty, $rule:path, $parser:expr) => { + #[automatically_derived] + impl<'pest> ::from_pest::FromPest<'pest> for $wrap { + type Rule = Rule; + type FatalError = ::from_pest::Void; + fn from_pest( + pest: &mut ::from_pest::pest::iterators::Pairs<'pest, Rule>, + ) -> ::std::result::Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(::from_pest::ConversionError::NoMatch)?; + if pair.as_rule() == $rule { + let mut inner = pair.clone().into_inner(); + let inner = &mut inner; + let this = $parser(pair); + if inner.clone().next().is_some() { + ::from_pest::log::trace!( + "when converting {}, found extraneous {:?}", + stringify!($wrap), + inner + ); + Err(::from_pest::ConversionError::Extraneous { + current_node: stringify!($wrap), + })?; + } + *pest = clone; + Ok(this) + } else { + Err(::from_pest::ConversionError::NoMatch) + } + } + } + }; +} + +use super::parse::{JSPathParser, Rule}; +#[cfg(feature = "compiled-path")] +use crate::syn_parse::parse_impl::{ + ParseUtilsExt, parse_bool, parse_float, validate_function_name, validate_js_int, + validate_js_str, validate_member_name_shorthand, +}; +use derive_new::new; +use from_pest::{ConversionError, FromPest, Void}; +use pest::Parser; +use pest::iterators::{Pair, Pairs}; +use pest_ast::FromPest; +use proc_macro2::Span; +#[allow(unused_imports)] +use syn::LitBool; +#[cfg(feature = "compiled-path")] +use syn::parse::ParseStream; +use syn::punctuated::Punctuated; +use syn::token::Bracket; +use syn::{Ident, Token, token}; +#[cfg(feature = "compiled-path")] +use syn_derive::Parse; + +pub trait KnowsRule { + const RULE: Rule; +} + +#[derive(Debug, new, PartialEq)] +pub struct PestIgnoredPunctuated(pub(crate) Punctuated); + +impl<'pest, T, P> FromPest<'pest> for PestIgnoredPunctuated +where + T: FromPest<'pest, Rule = Rule, FatalError = Void> + KnowsRule + std::fmt::Debug, + P: Default, +{ + type Rule = Rule; + type FatalError = Void; + + fn from_pest( + pest: &mut Pairs<'pest, 
Self::Rule>, + ) -> Result> { + let parsed_items = Vec::::from_pest(pest)?; + + Ok(PestIgnoredPunctuated(Punctuated::from_iter( + parsed_items.into_iter(), + ))) + } +} + +/// Allows for syn to parse things that pest checks but does not store as rules +#[derive(Debug, Default, new, PartialEq)] +pub struct PestLiteralWithoutRule(pub(crate) T); + +impl From for PestLiteralWithoutRule { + fn from(value: T) -> Self { + Self(value) + } +} +impl<'pest, T: Default> FromPest<'pest> for PestLiteralWithoutRule { + type Rule = Rule; + type FatalError = Void; + + /// Always generates default value and leaves parse stream alone + fn from_pest( + _pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + Ok(PestLiteralWithoutRule(T::default())) + } +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::main))] +pub struct Main { + pub(crate) jp_query: JPQuery, + pub(crate) eoi: EOI, +} + +impl Main { + /// Convenience function to allow tests to not import pest::parser::Parser + pub fn try_from_pest_parse(str: &str) -> Result { + let mut rules = JSPathParser::parse(Rule::main, str).map_err(|_| ())?; + // *IF* the FromPest implementations are correctly written then Main::from_pest *cannot fail* + Main::from_pest(&mut rules).map_err(|_| ()) + } +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[pest_ast(rule(Rule::EOI))] +pub struct EOI; + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::jp_query))] +pub struct JPQuery { + pub(crate) root: PestLiteralWithoutRule, + pub(crate) segments: Segments, +} + +#[derive(Debug, new, PartialEq, Default)] +pub struct Root; + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::segments))] +pub struct Segments { + #[cfg_attr(feature = "compiled-path", parse(Segment::parse_outer))] + pub(crate) segments: Vec, +} + +#[derive(Debug, new, 
PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::segment))] +pub enum Segment { + // THIS MUST BE FIRST + #[cfg_attr(feature = "compiled-path", parse(peek_func = DescendantSegment::peek))] + Descendant( + PestLiteralWithoutRule, + PestLiteralWithoutRule, + DescendantSegment, + ), + #[cfg_attr(feature = "compiled-path", parse(peek_func = ChildSegment::peek))] + Child(ChildSegment), +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::child_segment))] +pub enum ChildSegment { + #[cfg_attr(feature = "compiled-path", parse(peek_func = BracketedSelection::peek))] + Bracketed(BracketedSelection), + // search for `[` or `.`(must NOT be `..` because that is a descendant segment but syn will parse that as `..` not 2 periods) + #[cfg_attr(feature = "compiled-path", parse(peek = Token![.]))] + WildcardOrShorthand( + PestLiteralWithoutRule, + WildcardSelectorOrMemberNameShorthand, + ), +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct BracketedSelection { + #[cfg_attr(feature = "compiled-path", syn(bracketed))] + pub(crate) arg_bracket: token::Bracket, + #[cfg_attr(feature = "compiled-path", syn(in = arg_bracket))] + #[cfg_attr(feature = "compiled-path", parse(|i: ParseStream| PestIgnoredPunctuated::parse_separated_nonempty(i) + ))] + pub(crate) selectors: PestIgnoredPunctuated, +} + +impl<'pest> from_pest::FromPest<'pest> for BracketedSelection { + type Rule = Rule; + type FatalError = Void; + fn from_pest(pest: &mut Pairs<'pest, Rule>) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if pair.as_rule() == Rule::bracketed_selection { + let mut inner = pair.into_inner(); + let inner = &mut inner; + let this = BracketedSelection { + arg_bracket: Default::default(), + selectors: FromPest::from_pest(inner)?, + }; + if 
inner.clone().next().is_some() { + Err(ConversionError::Extraneous { + current_node: "BracketedSelection", + })?; + } + *pest = clone; + Ok(this) + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub enum WildcardSelectorOrMemberNameShorthand { + #[cfg_attr(feature = "compiled-path", parse(peek_func = WildcardSelector::peek))] + WildcardSelector(WildcardSelector), + #[cfg_attr(feature = "compiled-path", parse(peek_func = MemberNameShorthand::peek))] + MemberNameShorthand(MemberNameShorthand), +} +impl<'pest> FromPest<'pest> for WildcardSelectorOrMemberNameShorthand { + type Rule = Rule; + type FatalError = Void; + + fn from_pest( + pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + // let _ = pest.as_str().strip_prefix("."); + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + + match pair.as_rule() { + Rule::wildcard_selector => { + // let mut inner = pair.clone().into_inner(); + // let inner = &mut inner; + // Self is NOT actually a rule so pass pest, not inner + let this = Self::WildcardSelector(::from_pest::FromPest::from_pest(pest)?); + // if inner.clone().next().is_some() { + // ::from_pest::log::trace!( + // "when converting {}, found extraneous {:?}", + // stringify!(ChildSegment), + // stringify!(Bracketed) + // ); + // Err(ConversionError::Extraneous { + // current_node: stringify!(Bracketed), + // })?; + // } + Ok(this) + } + Rule::member_name_shorthand => { + // let mut inner = pair.clone().into_inner(); + // let inner = &mut inner; + // Self is NOT actually a rule so pass pest, not inner + let this = Self::MemberNameShorthand(::from_pest::FromPest::from_pest(pest)?); + // if inner.clone().next().is_some() { + // ::from_pest::log::trace!( + // "when converting {}, found extraneous {:?}", + // stringify!(ChildSegment), + // stringify!(Bracketed) + // ); + // Err(ConversionError::Extraneous { + // current_node: 
stringify!(Bracketed), + // })?; + // } + Ok(this) + } + _ => Err(ConversionError::NoMatch), + } + } +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[pest_ast(rule(Rule::wildcard_selector))] +pub struct WildcardSelector; + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct MemberNameShorthand { + #[cfg_attr(feature = "compiled-path", parse(validate_member_name_shorthand))] + pub(crate) name: String, +} + +impl<'pest> from_pest::FromPest<'pest> for MemberNameShorthand { + type Rule = Rule; + type FatalError = Void; + fn from_pest(pest: &mut Pairs<'pest, Rule>) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if pair.as_rule() == Rule::member_name_shorthand { + let this = Ok(MemberNameShorthand { + name: pest.as_str().trim().to_string(), + }); + *pest = clone; + this + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct JSString( + #[cfg_attr(feature = "compiled-path", parse(validate_js_str))] pub(crate) String, +); + +impl<'pest> from_pest::FromPest<'pest> for JSString { + type Rule = Rule; + type FatalError = Void; + fn from_pest(pest: &mut Pairs<'pest, Rule>) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if pair.clone().as_rule() == Rule::string { + let str = pair.as_str(); + let this = JSString(str[1..str.len() - 1].to_string()); + *pest = clone; + Ok(this) + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::descendant_segment))] +pub enum DescendantSegment { + #[cfg_attr(feature = "compiled-path", parse(peek_func = BracketedSelection::peek))] + BracketedSelection(BracketedSelection), + #[cfg_attr(feature = "compiled-path", parse(peek_func = WildcardSelector::peek))] + 
WildcardSelector(WildcardSelector), + #[cfg_attr(feature = "compiled-path", parse(peek_func = MemberNameShorthand::peek))] + MemberNameShorthand(MemberNameShorthand), +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::selector))] +pub enum Selector { + #[cfg_attr(feature = "compiled-path", parse(peek_func = WildcardSelector::peek))] + WildcardSelector(WildcardSelector), + #[cfg_attr(feature = "compiled-path", parse(peek_func = SliceSelector::peek))] + SliceSelector(SliceSelector), + #[cfg_attr(feature = "compiled-path", parse(peek_func = JSInt::peek))] + IndexSelector(IndexSelector), + #[cfg_attr(feature = "compiled-path", parse(peek_func = FilterSelector::peek))] + FilterSelector(FilterSelector), + // This MUST be the last element to prevent syn::Lit from catching one of the others, it's our "fallback" + #[cfg_attr(feature = "compiled-path", parse(peek_func = JSString::peek))] + NameSelector(NameSelector), +} +impl KnowsRule for Selector { + const RULE: Rule = Rule::selector; +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::slice_selector))] +pub struct SliceSelector( + #[cfg_attr(feature = "compiled-path", parse(SliceStart::maybe_parse))] + pub(crate) Option, + pub(crate) PestLiteralWithoutRule, + #[cfg_attr(feature = "compiled-path", parse(SliceEnd::maybe_parse))] pub(crate) Option, + #[cfg_attr(feature = "compiled-path", parse(SliceStep::maybe_parse))] + pub(crate) Option, +); + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::step))] +pub struct SliceStep( + pub(crate) PestLiteralWithoutRule, + pub(crate) JSInt, +); + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::start))] +pub struct SliceStart(pub(crate) JSInt); + +#[derive(Debug, new, PartialEq, FromPest)] 
+#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::end))] +pub struct SliceEnd(pub(crate) JSInt); + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::index_selector))] +pub struct IndexSelector(pub(crate) JSInt); + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::filter_selector))] +pub struct FilterSelector { + pub q: PestLiteralWithoutRule, + pub expr: LogicalExpr, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::logical_expr))] +pub struct LogicalExpr { + #[cfg_attr(feature = "compiled-path", parse(|i: ParseStream| PestIgnoredPunctuated::parse_separated_nonempty(i) + ))] + pub ands: PestIgnoredPunctuated, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::logical_expr_and))] +pub struct LogicalExprAnd { + #[cfg_attr(feature = "compiled-path", parse(|i: ParseStream| PestIgnoredPunctuated::parse_separated_nonempty(i) + ))] + pub atoms: PestIgnoredPunctuated, +} +impl KnowsRule for LogicalExprAnd { + const RULE: Rule = Rule::logical_expr_and; +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::atom_expr))] +pub enum AtomExpr { + #[cfg_attr(feature = "compiled-path", parse(peek_func = ParenExpr::peek))] + ParenExpr(ParenExpr), + #[cfg_attr(feature = "compiled-path", parse(peek_func = CompExpr::peek))] + CompExpr(CompExpr), + #[cfg_attr(feature = "compiled-path", parse(peek_func = TestExpr::peek))] + TestExpr(TestExpr), +} +impl KnowsRule for AtomExpr { + const RULE: Rule = Rule::atom_expr; +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::name_selector))] +pub struct NameSelector(pub(crate) 
JSString); + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct JSInt(#[cfg_attr(feature = "compiled-path", parse(validate_js_int))] pub(crate) i64); + +impl<'pest> FromPest<'pest> for JSInt { + type Rule = Rule; + type FatalError = Void; + + fn from_pest( + pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if pair.as_rule() == Rule::int { + let this = JSInt( + pair.as_str() + .trim() + .parse::() + .expect("int rule should always be a valid i64"), + ); + *pest = clone; + Ok(this) + } else { + Err(ConversionError::NoMatch) + } + } +} + +// New implementations below LLM STUFF + +#[derive(Debug, new, PartialEq, FromPest)] +#[pest_ast(rule(Rule::paren_expr))] +pub struct ParenExpr { + // #[cfg_attr(feature = "compiled-path", parse(peek_func = NotOp::peek))] + pub(crate) not_op: Option, + // #[paren] + pub(crate) paren: PestLiteralWithoutRule, + // #[inside(paren)] + pub(crate) expr: LogicalExpr, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::comp_expr))] +pub struct CompExpr { + pub(crate) left: Comparable, + pub(crate) op: CompOp, + pub(crate) right: Comparable, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::test_expr))] +pub struct TestExpr { + #[cfg_attr(feature = "compiled-path", parse(NotOp::maybe_parse))] + pub(crate) not_op: Option, + pub(crate) test: Test, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[pest_ast(rule(Rule::not_op))] +pub struct NotOp; + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::test))] +pub enum Test { + #[cfg_attr(feature = "compiled-path", parse(peek_func = RelQuery::peek))] + RelQuery(RelQuery), + #[cfg_attr(feature = "compiled-path", parse(peek_func = 
JPQuery::peek))] + JPQuery(JPQuery), + #[cfg_attr(feature = "compiled-path", parse(peek_func = FunctionExpr::peek))] + FunctionExpr(FunctionExpr), +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::comparable))] +pub enum Comparable { + #[cfg_attr(feature = "compiled-path", parse(peek_func = Literal::peek))] + Literal(Literal), + #[cfg_attr(feature = "compiled-path", parse(peek_func = SingularQuery::peek))] + SingularQuery(SingularQuery), + #[cfg_attr(feature = "compiled-path", parse(peek_func = FunctionExpr::peek))] + FunctionExpr(FunctionExpr), +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub enum CompOp { + #[cfg_attr(feature = "compiled-path", parse(peek = Token![==]))] + Eq(Token![==]), + #[cfg_attr(feature = "compiled-path", parse(peek = Token![!=]))] + Ne(Token![!=]), + #[cfg_attr(feature = "compiled-path", parse(peek = Token![<=]))] + Le(Token![<=]), + #[cfg_attr(feature = "compiled-path", parse(peek = Token![>=]))] + Ge(Token![>=]), + #[cfg_attr(feature = "compiled-path", parse(peek = Token![<]))] + Lt(Token![<]), + #[cfg_attr(feature = "compiled-path", parse(peek = Token![>]))] + Gt(Token![>]), + // #[cfg_attr(feature = "compiled-path", parse(peek_func = |input: ParseStream| input.peek(syn::token::In)))] + // In(syn::token::In), + // #[cfg_attr(feature = "compiled-path", parse(peek_func = |input: ParseStream| input.peek(kw::nin)))] + // Nin(kw::nin), + // #[cfg_attr(feature = "compiled-path", parse(peek_func = |input: ParseStream| input.peek(kw::size)))] + // Size(kw::size), + // #[cfg_attr(feature = "compiled-path", parse(peek_func = |input: ParseStream| input.peek(kw::noneOf)))] + // NoneOf(kw::noneOf), + // #[cfg_attr(feature = "compiled-path", parse(peek_func = |input: ParseStream| input.peek(kw::anyOf)))] + // AnyOf(kw::anyOf), + // #[cfg_attr(feature = "compiled-path", parse(peek_func = |input: ParseStream| 
input.peek(kw::subsetOf)))] + // SubsetOf(kw::subsetOf), +} +impl KnowsRule for CompOp { + const RULE: Rule = Rule::comp_op; +} +impl<'pest> FromPest<'pest> for CompOp { + type Rule = Rule; + type FatalError = Void; + + fn from_pest( + pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if pair.as_rule() == Self::RULE { + *pest = clone; + Ok(match pair.as_str() { + "==" => Self::Eq(Default::default()), + "!=" => Self::Ne(Default::default()), + "<=" => Self::Le(Default::default()), + ">=" => Self::Ge(Default::default()), + "<" => Self::Lt(Default::default()), + ">" => Self::Gt(Default::default()), + // "in" => {Self::In(Default::default())}, + // "nin" => {Self::Nin(Default::default())}, + // "size" => {Self::Size(Default::default())}, + // "noneOf" => {Self::NoneOf(Default::default())}, + // "anyOf" => {Self::AnyOf(Default::default())}, + // "subsetOf" => {Self::SubsetOf(Default::default())}, + _ => unreachable!(), + }) + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct FunctionExpr { + pub(crate) name: FunctionName, + #[cfg_attr(feature = "compiled-path", syn(parenthesized))] + pub(crate) paren: token::Paren, + #[cfg_attr(feature = "compiled-path", syn(in = paren))] + // #[cfg_attr(feature = "compiled-path", parse(|i: ParseStream| PestIgnoredPunctuated::parse_terminated(i)))] + pub(crate) args: PestIgnoredPunctuated, +} + +impl<'pest> from_pest::FromPest<'pest> for FunctionExpr { + type Rule = Rule; + type FatalError = Void; + fn from_pest(pest: &mut Pairs<'pest, Rule>) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if pair.as_rule() == Rule::function_expr { + let mut inner = pair.into_inner(); + let inner = &mut inner; + let this = FunctionExpr { + name: ::from_pest::FromPest::from_pest(inner)?, + paren: 
Default::default(), + args: FromPest::from_pest(inner)?, + }; + if inner.clone().next().is_some() { + Err(ConversionError::Extraneous { + current_node: "FunctionExpr", + })?; + } + *pest = clone; + Ok(this) + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct FunctionName { + #[cfg_attr(feature = "compiled-path", parse(validate_function_name))] + name: Ident, +} + +impl<'pest> FromPest<'pest> for FunctionName { + type Rule = Rule; + type FatalError = Void; + + fn from_pest( + pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + if matches!( + pair.as_rule(), + Rule::function_name_one_arg | Rule::function_name_two_arg + ) { + let mut inner = pair.into_inner(); + let inner = &mut inner; + let this = FunctionName { + name: Ident::new(inner.to_string().as_str().trim(), Span::call_site()), + }; + *pest = clone; + Ok(this) + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::function_argument))] +pub enum FunctionArgument { + #[cfg_attr(feature = "compiled-path", parse(peek_func = Literal::peek))] + Literal(Literal), + #[cfg_attr(feature = "compiled-path", parse(peek_func = Test::peek))] + Test(Test), + #[cfg_attr(feature = "compiled-path", parse(peek_func = LogicalExpr::peek))] + LogicalExpr(LogicalExpr), +} +impl KnowsRule for FunctionArgument { + const RULE: Rule = Rule::function_argument; +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::rel_query))] +pub struct RelQuery { + pub(crate) curr: PestLiteralWithoutRule, + pub(crate) segments: Segments, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] 
+#[pest_ast(rule(Rule::singular_query))] +pub enum SingularQuery { + #[cfg_attr(feature = "compiled-path", parse(peek_func = RelSingularQuery::peek))] + RelSingularQuery(RelSingularQuery), + #[cfg_attr(feature = "compiled-path", parse(peek_func = AbsSingularQuery::peek))] + AbsSingularQuery(AbsSingularQuery), +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::rel_singular_query))] +pub struct RelSingularQuery { + pub(crate) curr: PestLiteralWithoutRule, + pub(crate) segments: SingularQuerySegments, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::abs_singular_query))] +pub struct AbsSingularQuery { + pub(crate) root: PestLiteralWithoutRule, + pub(crate) segments: SingularQuerySegments, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::singular_query_segments))] +pub struct SingularQuerySegments { + #[cfg_attr(feature = "compiled-path", parse(SingularQuerySegment::parse_outer))] + pub(crate) segments: Vec, +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub enum SingularQuerySegment { + #[cfg_attr(feature = "compiled-path", parse(peek_func = NameSegment::peek))] + NameSegment(NameSegment), + #[cfg_attr(feature = "compiled-path", parse(peek_func = IndexSegment::peek))] + IndexSegment(IndexSegment), +} + +impl<'pest> FromPest<'pest> for SingularQuerySegment { + type Rule = Rule; + type FatalError = Void; + + fn from_pest( + pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + + match pair.as_rule() { + Rule::name_segment => { + // let mut inner = pair.clone().into_inner(); + // SingularQueryStatement is NOT actually a rule so pass pest, not inner + let this = 
Self::NameSegment(::from_pest::FromPest::from_pest(pest)?); + Ok(this) + } + Rule::index_segment => { + // let mut inner = pair.clone().into_inner(); + // SingularQueryStatement is NOT actually a rule so pass pest, not inner + let this = Self::IndexSegment(::from_pest::FromPest::from_pest(pest)?); + Ok(this) + } + _ => Err(ConversionError::NoMatch), + } + } +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::name_segment))] +pub enum NameSegment { + #[cfg_attr(feature = "compiled-path", parse(peek = token::Bracket))] + BracketedName(BracketName), + #[cfg_attr(feature = "compiled-path", parse(peek = Token![.]))] + DotName(PestLiteralWithoutRule, MemberNameShorthand), +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[pest_ast(rule(Rule::name_selector))] +pub struct BracketName { + // #[cfg_attr(feature = "compiled-path", syn(bracketed))] + pub(crate) bracket: PestLiteralWithoutRule, + // #[cfg_attr(feature = "compiled-path", syn(in = bracket))] + pub(crate) name: JSString, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[pest_ast(rule(Rule::index_segment))] +pub struct IndexSegment { + // #[cfg_attr(feature = "compiled-path", syn(bracketed))] + pub(crate) bracket: PestLiteralWithoutRule, + // #[cfg_attr(feature = "compiled-path", syn(in = bracket))] + pub(crate) index: JSInt, +} + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::literal))] +pub enum Literal { + #[cfg_attr(feature = "compiled-path", parse(peek_func = Number::peek))] + Number(Number), + #[cfg_attr(feature = "compiled-path", parse(peek_func = JSString::peek))] + String(JSString), + #[cfg_attr(feature = "compiled-path", parse(peek = LitBool))] + Bool(Bool), + #[cfg_attr(feature = "compiled-path", parse(peek_func = Null::peek))] + Null(Null), +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub enum Number { + 
#[cfg_attr(feature = "compiled-path", parse(peek_func = JSInt::peek))] + Int(JSInt), + #[cfg_attr(feature = "compiled-path", parse(peek = syn::LitFloat))] + Float(#[cfg_attr(feature = "compiled-path", parse(parse_float))] f64), +} + +impl<'pest> FromPest<'pest> for Number { + type Rule = Rule; + type FatalError = Void; + fn from_pest( + pest: &mut Pairs<'pest, Self::Rule>, + ) -> Result> { + let mut clone = pest.clone(); + let pair = clone.next().ok_or(ConversionError::NoMatch)?; + + if pair.as_rule() == Rule::number { + let mut inner = pair.into_inner(); + let inner = &mut inner; + + let this = if inner.clone().count() == 1 { + let pair = inner.next().unwrap(); + if pair.as_rule() == Rule::int { + let value = pair.as_str().parse::().expect("int"); + Ok(Self::Int(JSInt(value))) + } else { + Err(ConversionError::NoMatch) + } + } else { + let mut number_str = String::new(); + for pair in &mut *inner { + number_str.push_str(pair.as_str()); + } + + let value = number_str.parse::().expect("float"); + Ok(Self::Float(value)) + }?; + if inner.next().is_some() { + from_pest::log::trace!( + "when converting {}, found extraneous {:?}", + stringify!(FunctionName), + inner + ); + Err(ConversionError::Extraneous { + current_node: stringify!(FunctionName), + })?; + } + *pest = clone; + Ok(this) + } else { + Err(ConversionError::NoMatch) + } + } +} + +#[derive(Debug, new, PartialEq)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +pub struct Bool(#[cfg_attr(feature = "compiled-path", parse(parse_bool))] pub(crate) bool); + +terminating_from_pest!(Bool, Rule::bool, |inner: Pair| Bool( + inner.as_str().trim().parse::().expect("bool") +)); + +#[derive(Debug, new, PartialEq, FromPest)] +#[cfg_attr(feature = "compiled-path", derive(Parse))] +#[pest_ast(rule(Rule::null))] +pub struct Null(pub(crate) PestLiteralWithoutRule); diff --git a/jsonpath-ast/src/lib.rs b/jsonpath-ast/src/lib.rs new file mode 100644 index 0000000..e08b6d0 --- /dev/null +++ b/jsonpath-ast/src/lib.rs @@ 
-0,0 +1,3 @@ +pub mod ast; +pub use ast::parse; +pub mod syn_parse; diff --git a/jsonpath-ast/src/syn_parse.rs b/jsonpath-ast/src/syn_parse.rs new file mode 100644 index 0000000..bda5ce0 --- /dev/null +++ b/jsonpath-ast/src/syn_parse.rs @@ -0,0 +1,1230 @@ +#[cfg(feature = "compiled-path")] +pub(crate) mod parse_impl { + use crate::ast::parse::{JSPathParser, Rule}; + use crate::ast::{ + AbsSingularQuery, AtomExpr, Bool, BracketName, BracketedSelection, ChildSegment, CompExpr, + Comparable, DescendantSegment, EOI, FilterSelector, FunctionArgument, FunctionExpr, + FunctionName, IndexSegment, JPQuery, JSInt, JSString, Literal, LogicalExpr, LogicalExprAnd, + MemberNameShorthand, NameSegment, NotOp, Null, Number, ParenExpr, PestIgnoredPunctuated, + PestLiteralWithoutRule, RelQuery, RelSingularQuery, Root, Segment, Segments, Selector, + SingularQuery, SingularQuerySegment, SingularQuerySegments, SliceEnd, SliceSelector, + SliceStart, SliceStep, Test, TestExpr, WildcardSelector, + WildcardSelectorOrMemberNameShorthand, + }; + use crate::ast::{CompOp, IndexSelector, Main, NameSelector, kw}; + use pest::Parser; + use proc_macro2::{Ident, TokenStream}; + use quote::{ToTokens, quote}; + use syn::parse::{Parse, ParseStream}; + use syn::punctuated::Punctuated; + use syn::token::Token; + use syn::{LitBool, LitInt, LitStr, Token, token}; + + pub trait ParseUtilsExt: Parse { + fn peek(input: ParseStream) -> bool; + fn maybe_parse(input: ParseStream) -> syn::Result> { + Ok(if Self::peek(input) { + Some(input.parse()?) 
+ } else { + None + }) + } + + fn parse_outer(input: ParseStream) -> Result, syn::Error> { + let mut items = Vec::new(); + while Self::peek(input) { + items.push(input.parse()?); + } + Ok(items) + } + } + + impl PestIgnoredPunctuated { + pub(crate) fn parse_terminated(input: ParseStream) -> syn::Result { + Ok(PestIgnoredPunctuated(Punctuated::parse_terminated(input)?)) + } + + pub(crate) fn parse_separated_nonempty(input: ParseStream) -> syn::Result + where + P: Token, + { + let res = Punctuated::parse_separated_nonempty(input)?; + if res.is_empty() { + Err(input.error(format!( + "Expected at least one {}", + std::any::type_name::() + ))) + } else { + Ok(PestIgnoredPunctuated(res)) + } + } + } + + impl Parse for PestIgnoredPunctuated { + fn parse(input: ParseStream) -> syn::Result { + Self::parse_terminated(input) + } + } + + impl Parse for PestLiteralWithoutRule { + fn parse(input: ParseStream) -> syn::Result { + Ok(PestLiteralWithoutRule(input.parse::()?)) + } + } + impl ToTokens for PestLiteralWithoutRule { + fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) { + let Self { 0: __0 } = self; + { + { + let __expr: fn(&mut ::proc_macro2::TokenStream, _) = |tokens, val: &T| { + let mut sub = TokenStream::new(); + val.to_tokens(&mut sub); + tokens.extend( + quote! { ::jsonpath_ast::ast::PestLiteralWithoutRule::new(Default::default()) }, + ); + }; + __expr(tokens, __0) + }; + } + } + } + + impl ToTokens for Main { + fn to_tokens(&self, tokens: &mut TokenStream) { + let (mut q, mut e) = (TokenStream::new(), TokenStream::new()); + self.jp_query.to_tokens(&mut q); + self.eoi.to_tokens(&mut e); + tokens.extend(quote! { + ::jsonpath_ast::ast::Main::new( + #q, + #e, + ) + }) + } + } + impl Main { + /// Convenience function so that tests don't need to import syn + pub fn parse_syn_ast_from_string(string: String) -> Result { + syn::parse_str::
(&string).map_err(|_| ()) + } + } + + impl Parse for EOI { + fn parse(input: ParseStream) -> syn::Result { + if input.is_empty() { + Ok(Self) + } else { + Err(input.error("Unexpected token")) + } + } + } + impl ToTokens for EOI { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(quote! {::jsonpath_ast::ast::EOI}) + } + } + + impl Parse for Root { + fn parse(input: ParseStream) -> syn::Result { + let _ = input.parse::()?; + Ok(Root) + } + } + impl ToTokens for Root { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(quote!(::jsonpath_ast::ast::Root::new())) + } + } + + impl ToTokens for WildcardSelector { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(quote! {::jsonpath_ast::ast::WildcardSelector}) + } + } + + impl ToTokens for NotOp { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(quote! {::jsonpath_ast::ast::NotOp}) + } + } + + impl ParseUtilsExt for Root { + fn peek(input: ParseStream) -> bool { + input.peek(Token![$]) + } + } + + impl ToTokens for JPQuery { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { root, segments } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::JPQuery::new( + #root, + #segments, + ) + }) + } + } + + impl ParseUtilsExt for JPQuery { + fn peek(input: ParseStream) -> bool { + Root::peek(input) + } + } + + impl ToTokens for Segments { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut items = TokenStream::new(); + for item in self.segments.iter() { + item.to_tokens(&mut items); + items.extend(quote!(,)) + } + tokens.extend(quote! { + ::jsonpath_ast::ast::Segments::new( + Vec::from([#items]), + ) + }) + } + } + + impl ToTokens for ChildSegment { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Bracketed(bracketed) => { + let mut bracketed_tokens = TokenStream::new(); + bracketed.to_tokens(&mut bracketed_tokens); + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::ChildSegment::Bracketed(#bracketed_tokens) + }); + } + Self::WildcardOrShorthand(dot, wildcard_or_shorthand) => { + let mut dot_tokens = TokenStream::new(); + let mut wildcard_or_shorthand_tokens = TokenStream::new(); + dot.to_tokens(&mut dot_tokens); + wildcard_or_shorthand.to_tokens(&mut wildcard_or_shorthand_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::ChildSegment::WildcardOrShorthand( + #dot_tokens, + #wildcard_or_shorthand_tokens + ) + }); + } + } + } + } + + impl ToTokens for WildcardSelectorOrMemberNameShorthand { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::WildcardSelector(wildcard) => { + let mut wildcard_tokens = TokenStream::new(); + wildcard.to_tokens(&mut wildcard_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::WildcardSelectorOrMemberNameShorthand::WildcardSelector(#wildcard_tokens) + }); + } + Self::MemberNameShorthand(shorthand) => { + let mut shorthand_tokens = TokenStream::new(); + shorthand.to_tokens(&mut shorthand_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::WildcardSelectorOrMemberNameShorthand::MemberNameShorthand(#shorthand_tokens) + }); + } + } + } + } + + impl ToTokens for MemberNameShorthand { + fn to_tokens(&self, tokens: &mut TokenStream) { + let name = &self.name; + tokens.extend(quote! { + ::jsonpath_ast::ast::MemberNameShorthand::new( + #name.to_string() + ) + }); + } + } + + impl ToTokens for BracketedSelection { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut selectors_tokens = TokenStream::new(); + self.selectors.to_tokens(&mut selectors_tokens); + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::BracketedSelection::new( + Default::default(), + #selectors_tokens + ) + }); + } + } + + impl ToTokens for PestIgnoredPunctuated + where + T: ToTokens, + P: ToTokens, + { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut items = TokenStream::new(); + for item in self.0.iter() { + item.to_tokens(&mut items); + items.extend(quote!(,)) + } + tokens.extend(quote! { + ::jsonpath_ast::ast::PestIgnoredPunctuated::new(::syn::punctuated::Punctuated::from_iter(Vec::from([#items]))) + }); + } + } + + impl ToTokens for DescendantSegment { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::BracketedSelection(bracketed) => { + let mut bracketed_tokens = TokenStream::new(); + bracketed.to_tokens(&mut bracketed_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::DescendantSegment::BracketedSelection(#bracketed_tokens) + }); + } + Self::WildcardSelector(wildcard) => { + let mut wildcard_tokens = TokenStream::new(); + wildcard.to_tokens(&mut wildcard_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::DescendantSegment::WildcardSelector(#wildcard_tokens) + }); + } + Self::MemberNameShorthand(shorthand) => { + let mut shorthand_tokens = TokenStream::new(); + shorthand.to_tokens(&mut shorthand_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::DescendantSegment::MemberNameShorthand(#shorthand_tokens) + }); + } + } + } + } + + impl ToTokens for Segment { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Child(child) => { + let mut child_tokens = TokenStream::new(); + child.to_tokens(&mut child_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::Segment::new_child(#child_tokens) + }); + } + Self::Descendant(_, _, descendant) => { + let mut descendant_tokens = TokenStream::new(); + descendant.to_tokens(&mut descendant_tokens); + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::Segment::new_descendant(Default::default(), Default::default(), #descendant_tokens) + }); + } + } + } + } + impl ParseUtilsExt for Segment { + fn peek(input: ParseStream) -> bool { + ChildSegment::peek(input) || DescendantSegment::peek(input) + } + } + + impl ParseUtilsExt for ChildSegment { + fn peek(input: ParseStream) -> bool { + input.peek(token::Bracket) || input.peek(Token![.]) + } + } + + impl ParseUtilsExt for BracketedSelection { + fn peek(input: ParseStream) -> bool { + input.peek(token::Bracket) + } + } + + // A named rule exists for this, so it's easier to let the FromPest automatically generate and + // just harvest the wildcard token manually in syn + impl Parse for WildcardSelector { + fn parse(input: ParseStream) -> syn::Result { + input.parse::().map(|_| WildcardSelector) + } + } + + impl ParseUtilsExt for WildcardSelector { + fn peek(input: ParseStream) -> bool { + input.peek(Token![*]) + } + } + + impl ParseUtilsExt for MemberNameShorthand { + fn peek(input: ParseStream) -> bool { + input.peek(syn::Ident) || input.peek(Token![_]) + } + } + + pub fn validate_member_name_shorthand(input: ParseStream) -> Result { + // Special case where syn treats a lone underscore as a token, not an ident + if input.peek(Token![_]) { + input.parse::()?; + return Ok("_".to_string()); + } + let ident = input.parse::()?; + match JSPathParser::parse(Rule::member_name_shorthand, &ident.to_string()) { + Ok(_) => Ok(ident.to_string()), + Err(e) => Err(syn::Error::new(ident.span(), e.to_string())), + } + } + + impl ParseUtilsExt for DescendantSegment { + fn peek(input: ParseStream) -> bool { + input.peek(Token![.]) && input.peek2(Token![.]) + } + } + + impl ParseUtilsExt for Selector { + fn peek(input: ParseStream) -> bool { + WildcardSelector::peek(input) + || (input.peek(Token![:]) || input.peek2(Token![:])) + || JSInt::peek(input) + || FilterSelector::peek(input) + || JSString::peek(input) + } + } + + impl ToTokens for Selector { + fn 
to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::WildcardSelector(wildcard) => { + let mut wildcard_tokens = TokenStream::new(); + wildcard.to_tokens(&mut wildcard_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::Selector::new_wildcard_selector(#wildcard_tokens) + }); + } + Self::SliceSelector(slice) => { + let mut slice_tokens = TokenStream::new(); + slice.to_tokens(&mut slice_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::Selector::new_slice_selector(#slice_tokens) + }); + } + Self::IndexSelector(index) => { + let mut index_tokens = TokenStream::new(); + index.to_tokens(&mut index_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::Selector::new_index_selector(#index_tokens) + }); + } + Self::FilterSelector(filter) => { + let mut filter_tokens = TokenStream::new(); + filter.to_tokens(&mut filter_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::Selector::new_filter_selector(#filter_tokens) + }); + } + Self::NameSelector(name) => { + let mut name_tokens = TokenStream::new(); + name.to_tokens(&mut name_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::Selector::new_name_selector(#name_tokens) + }); + } + } + } + } + + impl ToTokens for SliceSelector { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self(start, _, stop, step) = self; + let repr_start = match start { + Some(some_start) => quote! {Some(#some_start)}, + None => quote! {None}, + }; + let repr_stop = match stop { + Some(some_stop) => quote! {Some(#some_stop)}, + None => quote! {None}, + }; + let repr_step = match step { + Some(some_step) => quote! {Some(#some_step)}, + None => quote! {None}, + }; + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::SliceSelector::new( + #repr_start, + Default::default(), + #repr_stop, + #repr_step, + ) + }) + } + } + + impl ToTokens for SliceStart { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self(_0) = self; + tokens.extend(quote!(::jsonpath_ast::ast::SliceStart::new(#_0))); + } + } + impl ToTokens for SliceEnd { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self(_0) = self; + tokens.extend(quote!(::jsonpath_ast::ast::SliceEnd::new(#_0))); + } + } + + impl ToTokens for SliceStep { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self(_, _0) = self; + tokens.extend(quote!(::jsonpath_ast::ast::SliceStep::new(Default::default(), #_0))); + } + } + + impl ToTokens for IndexSelector { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self(_0) = self; + tokens.extend(quote!(::jsonpath_ast::ast::IndexSelector::new(#_0))); + } + } + + impl ToTokens for FilterSelector { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { q, expr } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::FilterSelector::new( + #q, + #expr, + ) + }); + } + } + + impl ToTokens for NameSelector { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self(_0) = self; + tokens.extend(quote!(::jsonpath_ast::ast::NameSelector::new(#_0))); + } + } + + impl ToTokens for LogicalExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { ands } = self; + tokens.extend(quote!( ::jsonpath_ast::ast::LogicalExpr::new( #ands ) )); + } + } + impl ToTokens for LogicalExprAnd { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { atoms } = self; + tokens.extend(quote!( ::jsonpath_ast::ast::LogicalExprAnd::new( #atoms ) )); + } + } + + impl ToTokens for AtomExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(match self { + AtomExpr::ParenExpr(inner) => { + quote! { ::jsonpath_ast::ast::AtomExpr::new_paren_expr(#inner) } + } + AtomExpr::CompExpr(inner) => { + quote! 
{ ::jsonpath_ast::ast::AtomExpr::new_comp_expr(#inner) } + } + AtomExpr::TestExpr(inner) => { + quote! { ::jsonpath_ast::ast::AtomExpr::new_test_expr(#inner) } + } + }); + } + } + + impl ToTokens for ParenExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + #[allow(unused_variables)] + let Self { + not_op, + paren, + expr, + } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::ParenExpr::new( + #not_op, + Default::default(), + #expr + )}); + } + } + impl ToTokens for CompExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { left, op, right } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::CompExpr::new( + #left, + #op, + #right + ) + }); + } + } + + impl ToTokens for Comparable { + fn to_tokens(&self, tokens: &mut TokenStream) { + let variant = match self { + Comparable::Literal(inner) => { + quote!( new_literal(#inner) ) + } + Comparable::SingularQuery(inner) => { + quote!( new_singular_query(#inner) ) + } + Comparable::FunctionExpr(inner) => { + quote!( new_function_expr(#inner) ) + } + }; + tokens.extend(quote!(::jsonpath_ast::ast::Comparable::#variant)); + } + } + + impl ToTokens for Literal { + fn to_tokens(&self, tokens: &mut TokenStream) { + let variant = match self { + Literal::Number(inner) => { + quote!(new_number(#inner)) + } + Literal::String(inner) => { + quote!(new_string(#inner)) + } + Literal::Bool(inner) => { + quote!(new_bool(#inner)) + } + Literal::Null(inner) => { + quote!(new_null(#inner)) + } + }; + tokens.extend(quote!(::jsonpath_ast::ast::Literal::#variant)) + } + } + + impl ToTokens for Number { + fn to_tokens(&self, tokens: &mut TokenStream) { + let variant = match self { + Number::Int(inner) => { + quote!(new_int(#inner)) + } + Number::Float(inner) => { + quote!(new_float(#inner)) + } + }; + tokens.extend(quote!(::jsonpath_ast::ast::Number::#variant)) + } + } + + impl ToTokens for SingularQuery { + fn to_tokens(&self, tokens: &mut TokenStream) { + let variant = match self { + 
SingularQuery::RelSingularQuery(inner) => { + quote!(new_rel_singular_query(#inner)) + } + SingularQuery::AbsSingularQuery(inner) => { + quote!(new_abs_singular_query(#inner)) + } + }; + tokens.extend(quote!(::jsonpath_ast::ast::SingularQuery::#variant )) + } + } + + impl ToTokens for FunctionName { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(quote! { + ::jsonpath_ast::ast::FunctionName::new( + ::proc_macro2::Ident::new("function_name", ::proc_macro2::Span::call_site()) + ) + }); + } + } + + impl ToTokens for FunctionExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { + name, + paren: _, + args, + } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::FunctionExpr::new( + #name, + Default::default(), + #args + ) + }); + } + } + + impl ToTokens for CompOp { + fn to_tokens(&self, tokens: &mut TokenStream) { + let variant = match self { + CompOp::Eq(_) => { + quote!(new_eq) + } + CompOp::Ne(_) => { + quote!(new_ne) + } + CompOp::Le(_) => { + quote!(new_le) + } + CompOp::Ge(_) => { + quote!(new_ge) + } + CompOp::Lt(_) => { + quote!(new_lt) + } + CompOp::Gt(_) => { + quote!(new_gt) + } + }; + tokens.extend(quote!(::jsonpath_ast::ast::CompOp::#variant(Default::default()))); + } + } + + impl ToTokens for RelQuery { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { curr, segments } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::RelQuery::new( + #curr, + #segments + ) + }); + } + } + + impl ToTokens for RelSingularQuery { + fn to_tokens(&self, tokens: &mut TokenStream) { + #[allow(unused_variables)] + let Self { curr, segments } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::RelSingularQuery::new( + Default::default(), + #segments + ) + }); + } + } + + impl ToTokens for AbsSingularQuery { + fn to_tokens(&self, tokens: &mut TokenStream) { + #[allow(unused_variables)] + let Self { root, segments } = self; + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::AbsSingularQuery::new( + Default::default(), + #segments + ) + }); + } + } + + impl ToTokens for SingularQuerySegments { + fn to_tokens(&self, tokens: &mut TokenStream) { + let mut out = TokenStream::new(); + for segment in self.segments.iter() { + out.extend(quote!(#segment,)); + } + tokens + .extend(quote!(::jsonpath_ast::ast::SingularQuerySegments::new(Vec::from([#out])))); + } + } + + impl ToTokens for SingularQuerySegment { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::NameSegment(segment) => { + tokens.extend(quote! { + ::jsonpath_ast::ast::SingularQuerySegment::new_name_segment(#segment) + }); + } + Self::IndexSegment(segment) => { + tokens.extend(quote! { + ::jsonpath_ast::ast::SingularQuerySegment::new_index_segment(#segment) + }); + } + } + } + } + + impl ToTokens for NameSegment { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::BracketedName(name) => { + tokens.extend(quote! { + ::jsonpath_ast::ast::NameSegment::BracketedName(#name) + }); + } + Self::DotName(_dot, shorthand) => { + tokens.extend(quote! { + ::jsonpath_ast::ast::NameSegment::DotName( + Default::default(), + #shorthand + ) + }); + } + } + } + } + + impl ToTokens for BracketName { + fn to_tokens(&self, tokens: &mut TokenStream) { + #[allow(unused_variables)] + let Self { bracket, name } = self; + tokens.extend(quote! { + ::jsonpath_ast::ast::BracketName { + bracket: Default::default(), + name: #name, + } + }); + } + } + + impl ToTokens for IndexSegment { + fn to_tokens(&self, tokens: &mut TokenStream) { + #[allow(unused_variables)] + let Self { bracket, index } = self; + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::IndexSegment { + bracket: Default::default(), + index: #index, + } + }); + } + } + + impl ToTokens for FunctionArgument { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Literal(literal) => { + let mut literal_tokens = TokenStream::new(); + literal.to_tokens(&mut literal_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::FunctionArgument::Literal(#literal_tokens) + }); + } + Self::Test(test) => { + let mut test_tokens = TokenStream::new(); + test.to_tokens(&mut test_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::FunctionArgument::Test(#test_tokens) + }); + } + Self::LogicalExpr(expr) => { + let mut expr_tokens = TokenStream::new(); + expr.to_tokens(&mut expr_tokens); + tokens.extend(quote! { + ::jsonpath_ast::ast::FunctionArgument::LogicalExpr(#expr_tokens) + }); + } + } + } + } + + impl ToTokens for Test { + fn to_tokens(&self, tokens: &mut TokenStream) { + let variant = match self { + Test::RelQuery(inner) => { + quote!(new_rel_query(#inner)) + } + Test::JPQuery(inner) => { + quote!(new_jp_query(#inner)) + } + Test::FunctionExpr(inner) => { + quote!(new_function_expr(#inner)) + } + }; + tokens.extend(quote!(::jsonpath_ast::ast::Test::#variant)); + } + } + + impl ToTokens for TestExpr { + fn to_tokens(&self, tokens: &mut TokenStream) { + let Self { not_op, test } = self; + tokens.extend(quote! 
{ + ::jsonpath_ast::ast::TestExpr::new( + #not_op, + #test + ) + }); + } + } + + impl ParseUtilsExt for JSString { + fn peek(input: ParseStream) -> bool { + input.peek(syn::LitStr) || input.peek(syn::LitChar) + } + } + /// Validates a JSONPath string literal according to RFC 9535 + /// Control characters (U+0000 through U+001F) are not allowed unescaped + /// in string literals, whether single-quoted or double-quoted + pub(crate) fn validate_js_str(input: ParseStream) -> Result { + let lit_str = input.parse::()?; + let s = lit_str.value(); + for (i, c) in s.chars().enumerate() { + if c <= '\u{001F}' { + return Err(syn::Error::new( + lit_str.span(), + format!( + "Invalid control character U+{:04X} at position {} in string literal", + c as u32, i + ), + )); + } + } + Ok(s) + } + + impl ParseUtilsExt for SliceSelector { + fn peek(input: ParseStream) -> bool { + input.peek(Token![:]) || input.peek2(Token![:]) + } + } + impl Parse for ParenExpr { + fn parse(input: ParseStream) -> syn::Result { + let not_op: Option = if NotOp::peek(input) { + Some(input.parse()?) 
+ } else { + None + }; + let __paren_backing_token_stream; + let paren: PestLiteralWithoutRule = + syn::parenthesized!(__paren_backing_token_stream in input ).into(); + let expr: LogicalExpr = __paren_backing_token_stream.parse()?; + Ok(ParenExpr { + not_op, + paren, + expr, + }) + } + } + + impl Parse for NotOp { + fn parse(input: ParseStream) -> syn::Result { + input.parse::().map(|_| NotOp) + } + } + + impl Parse for BracketName { + fn parse(__input: ParseStream) -> syn::Result { + let bracket; + Ok(BracketName { + bracket: syn::bracketed!(bracket in __input ).into(), + name: bracket.parse()?, + }) + } + } + + impl Parse for IndexSegment { + fn parse(__input: ParseStream) -> syn::Result { + let bracket; + Ok(IndexSegment { + bracket: syn::bracketed!(bracket in __input ).into(), + index: bracket.parse()?, + }) + } + } + + impl ParseUtilsExt for SliceStep { + fn peek(input: ParseStream) -> bool { + input.peek(Token![:]) + } + + fn maybe_parse(input: ParseStream) -> syn::Result> { + if input.peek(Token![:]) { + let colon = input.parse()?; + if JSInt::peek(input) { + return Ok(Some(Self(colon, input.parse()?))); + } + } + Ok(None) + } + } + + impl ParseUtilsExt for SliceStart { + fn peek(input: ParseStream) -> bool { + input.peek(Token![:]) || input.peek2(Token![:]) + } + + fn maybe_parse(input: ParseStream) -> syn::Result> { + if input.peek(Token![:]) { + return Ok(None); + } else { + Ok(Some(Self(input.parse()?))) + } + } + } + + impl ParseUtilsExt for SliceEnd { + fn peek(input: ParseStream) -> bool { + JSInt::peek(input) + } + } + + impl ParseUtilsExt for FilterSelector { + fn peek(input: ParseStream) -> bool { + input.peek(Token![?]) + } + } + + impl ParseUtilsExt for LogicalExpr { + fn peek(input: ParseStream) -> bool { + LogicalExprAnd::peek(input) + } + } + + impl ParseUtilsExt for LogicalExprAnd { + fn peek(input: ParseStream) -> bool { + AtomExpr::peek(input) + } + } + + impl ParseUtilsExt for AtomExpr { + fn peek(input: ParseStream) -> bool { + 
ParenExpr::peek(input) || CompExpr::peek(input) || TestExpr::peek(input) + } + } + + impl ParseUtilsExt for ParenExpr { + fn peek(input: ParseStream) -> bool { + input.peek(Token![!]) || input.peek(token::Paren) + } + } + + impl ParseUtilsExt for CompExpr { + fn peek(input: ParseStream) -> bool { + Comparable::peek(input) + } + } + impl ParseUtilsExt for TestExpr { + fn peek(input: ParseStream) -> bool { + input.peek(Token![!]) || Test::peek(input) + } + } + + impl ParseUtilsExt for NotOp { + fn peek(input: ParseStream) -> bool { + input.peek(Token![!]) + } + } + + impl ParseUtilsExt for Test { + fn peek(input: ParseStream) -> bool { + RelQuery::peek(input) || JPQuery::peek(input) || FunctionExpr::peek(input) + } + } + impl ParseUtilsExt for Comparable { + fn peek(input: ParseStream) -> bool { + Literal::peek(input) || SingularQuery::peek(input) || FunctionExpr::peek(input) + } + } + impl ParseUtilsExt for JSInt { + fn peek(input: ParseStream) -> bool { + input.peek(LitInt) + } + } + + impl ToTokens for JSInt { + fn to_tokens(&self, tokens: &mut TokenStream) { + let value = self.0; + tokens.extend(quote! { + ::jsonpath_ast::ast::JSInt::new(#value) + }); + } + } + + impl ToTokens for JSString { + fn to_tokens(&self, tokens: &mut TokenStream) { + let value = &self.0; + tokens.extend(quote! { + ::jsonpath_ast::ast::JSString::new(#value.to_string()) + }); + } + } + + impl ToTokens for Bool { + fn to_tokens(&self, tokens: &mut TokenStream) { + let value = self.0; + tokens.extend(quote! { + ::jsonpath_ast::ast::Bool::new(#value) + }); + } + } + + impl ToTokens for Null { + fn to_tokens(&self, tokens: &mut TokenStream) { + tokens.extend(quote! { + ::jsonpath_ast::ast::Null::new(Default::default()) + }); + } + } + + /// Only used by syn + pub fn validate_js_int(input: ParseStream) -> Result { + let lit_int = input.parse::()?; + let parsed = lit_int.base10_parse::()?; + Ok(common_bound_validate(parsed).map_err(|e| syn::Error::new(lit_int.span(), e))?) 
+ } + + const MAX_VAL: i64 = 9007199254740991; // Maximum safe integer value in JavaScript + const MIN_VAL: i64 = -9007199254740991; // Minimum safe integer value in JavaScript + + /// Used by both syn ~~and pest~~(pest changed to use range constraints) + fn common_bound_validate(num: i64) -> Result { + if num > MAX_VAL || num < MIN_VAL { + let info = if num > MAX_VAL { + ("greater", "maximum", MAX_VAL) + } else { + ("less", "minimum", MIN_VAL) + }; + return Err(format!( + "number out of bounds: {} is {} than {} JS integer value: {}", + num, info.0, info.1, info.2, + )); + } + Ok(num) + } + + impl ParseUtilsExt for FunctionExpr { + fn peek(input: ParseStream) -> bool { + FunctionName::peek(input) + } + } + + impl ParseUtilsExt for FunctionName { + fn peek(input: ParseStream) -> bool { + input.peek(kw::length) + || input.peek(kw::value) + || input.peek(kw::count) + || input.peek(kw::search) + || input.peek(Token![match]) + || input.peek(Token![in]) + || input.peek(kw::nin) + || input.peek(kw::none_of) + || input.peek(kw::any_of) + || input.peek(kw::subset_of) + } + } + + pub fn validate_function_name(input: ParseStream) -> Result { + if input.peek(kw::length) { + input.parse::()?; + return Ok(Ident::new("length", input.span())); + } + if input.peek(kw::value) { + input.parse::()?; + return Ok(Ident::new("value", input.span())); + } + if input.peek(kw::count) { + input.parse::()?; + return Ok(Ident::new("count", input.span())); + } + if input.peek(kw::search) { + input.parse::()?; + return Ok(Ident::new("search", input.span())); + } + if input.peek(Token![match]) { + input.parse::()?; + return Ok(Ident::new("match", input.span())); + } + if input.peek(Token![in]) { + input.parse::()?; + return Ok(Ident::new("in", input.span())); + } + if input.peek(kw::nin) { + input.parse::()?; + return Ok(Ident::new("nin", input.span())); + } + if input.peek(kw::none_of) { + input.parse::()?; + return Ok(Ident::new("none_of", input.span())); + } + if input.peek(kw::any_of) { + 
input.parse::()?; + return Ok(Ident::new("any_of", input.span())); + } + if input.peek(kw::subset_of) { + input.parse::()?; + return Ok(Ident::new("subset_of", input.span())); + } + Err(syn::Error::new( + input.span(), + "invalid function name, expected one of: length, value, count, search, match, in, nin, none_of, any_of, subset_of", + )) + } + + impl ParseUtilsExt for RelQuery { + fn peek(input: ParseStream) -> bool { + input.peek(Token![@]) + } + } + + impl ParseUtilsExt for SingularQuery { + fn peek(input: ParseStream) -> bool { + RelSingularQuery::peek(input) || AbsSingularQuery::peek(input) + } + } + + impl ParseUtilsExt for RelSingularQuery { + fn peek(input: ParseStream) -> bool { + input.peek(Token![@]) + } + } + + impl ParseUtilsExt for AbsSingularQuery { + fn peek(input: ParseStream) -> bool { + Root::peek(input) + } + } + + impl ParseUtilsExt for SingularQuerySegment { + fn peek(input: ParseStream) -> bool { + NameSegment::peek(input) || IndexSegment::peek(input) + } + } + + impl ParseUtilsExt for NameSegment { + fn peek(input: ParseStream) -> bool { + input.peek(token::Bracket) || input.peek(Token![.]) + } + } + + impl ParseUtilsExt for IndexSegment { + fn peek(input: ParseStream) -> bool { + input.peek(token::Bracket) + } + } + + impl ParseUtilsExt for Literal { + fn peek(input: ParseStream) -> bool { + Number::peek(input) || JSString::peek(input) || Bool::peek(input) || Null::peek(input) + } + } + + pub fn parse_float(input: ParseStream) -> syn::Result { + let f = input.parse::()?; + Ok(f.base10_parse::()?) 
+ } + + impl ParseUtilsExt for Number { + fn peek(input: ParseStream) -> bool { + JSInt::peek(input) || input.peek(syn::LitFloat) + } + } + + pub fn parse_bool(input: ParseStream) -> Result { + let lit_bool = input.parse::()?; + Ok(lit_bool.value) + } + + impl ParseUtilsExt for Bool { + fn peek(input: ParseStream) -> bool { + input.peek(LitBool) + } + } + + impl ParseUtilsExt for Null { + fn peek(input: ParseStream) -> bool { + input.peek(kw::null) + } + } +} diff --git a/jsonpath-rust-impl/Cargo.toml b/jsonpath-rust-impl/Cargo.toml new file mode 100644 index 0000000..bec4889 --- /dev/null +++ b/jsonpath-rust-impl/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "jsonpath-rust-impl" +version = "0.1.0" +edition = "2024" + +[lib] +proc-macro = true + +[dependencies] +jsonpath-ast = { path = "../jsonpath-ast", features = ["compiled-path"] } +syn = "2.0.101" +proc-macro2 = "1.0.95" +quote = "1.0.40" + +[dev-dependencies] +trybuild = "1.0.105" + diff --git a/jsonpath-rust-impl/src/lib.rs b/jsonpath-rust-impl/src/lib.rs new file mode 100644 index 0000000..7aac062 --- /dev/null +++ b/jsonpath-rust-impl/src/lib.rs @@ -0,0 +1,10 @@ +use jsonpath_ast::ast::Main; +use proc_macro::TokenStream; +use quote::quote; +use syn::parse_macro_input; + +#[proc_macro] +pub fn json_query(input: TokenStream) -> TokenStream { + let main = parse_macro_input!(input as Main); + quote! 
{#main}.into() +} diff --git a/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/compile_and_passes.rs b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/compile_and_passes.rs new file mode 100644 index 0000000..b4b2435 --- /dev/null +++ b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/compile_and_passes.rs @@ -0,0 +1,307 @@ +// Test case: 00_root +// Tags: No tags +#[test] +fn test_00_root() { + let q_ast = ::jsonpath_rust_impl::json_query!($); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 03_name_shorthand +// Tags: No tags +#[test] +fn test_03_name_shorthand() { + let q_ast = ::jsonpath_rust_impl::json_query!($.a); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.a"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 04_name_shorthand_extended_unicode_uc +// Tags: No tags +#[test] +fn test_04_name_shorthand_extended_unicode_uc() { + let q_ast = ::jsonpath_rust_impl::json_query!($["☺"]); + #[allow(unused_variables)] + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.☺"#).expect("failed to parse"); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$["☺"]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 05_name_shorthand_underscore +// Tags: No tags +#[test] +fn test_05_name_shorthand_underscore() { + let q_ast = ::jsonpath_rust_impl::json_query!($._); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$._"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 08_name_shorthand_absent_data +// Tags: No tags +#[test] +fn test_08_name_shorthand_absent_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($.c); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.c"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 09_name_shorthand_array_data +// Tags: No tags +#[test] 
+fn test_09_name_shorthand_array_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($.a); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.a"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 10_name_shorthand_object_data_nested +// Tags: No tags +#[test] +fn test_10_name_shorthand_object_data_nested() { + let q_ast = ::jsonpath_rust_impl::json_query!($.a.b.c); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.a.b.c"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 11_wildcard_shorthand_object_data +// Tags: No tags +#[test] +fn test_11_wildcard_shorthand_object_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($.*); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.*"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 12_wildcard_shorthand_array_data +// Tags: No tags +#[test] +fn test_12_wildcard_shorthand_array_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($.*); + let q_pest = ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.*"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 13_wildcard_selector_array_data +// Tags: No tags +#[test] +fn test_13_wildcard_selector_array_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($[*]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[*]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 14_wildcard_shorthand_then_name_shorthand +// Tags: No tags +#[test] +fn test_14_wildcard_shorthand_then_name_shorthand() { + let q_ast = ::jsonpath_rust_impl::json_query!($.*.a); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.*.a"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 15_multiple_selectors +// Tags: No tags +#[test] +fn test_15_multiple_selectors() { + let q_ast = ::jsonpath_rust_impl::json_query!($[0,2]); + let q_pest = + 
::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[0,2]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 19_multiple_selectors_name_and_index_array_data(EDITED: due to macro limitations) +// Tags: No tags +#[test] +fn test_19_multiple_selectors_name_and_index_array_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($["a",1]); + let q_pest_single = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$['a',1]"#).expect("failed to parse"); + let q_pest_double = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$["a",1]"#).expect("failed to parse"); + assert_eq!(q_pest_single, q_ast); + assert_eq!(q_pest_single, q_pest_double); +} + +// Test case: 20_multiple_selectors_name_and_index_object_data(EDITED: due to macro limitations) +// Tags: No tags +#[test] +fn test_20_multiple_selectors_name_and_index_object_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($["a",1]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$['a',1]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 21_multiple_selectors_index_and_slice +// Tags: No tags +#[test] +fn test_21_multiple_selectors_index_and_slice() { + let q_ast = ::jsonpath_rust_impl::json_query!($[1,5:7]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[1,5:7]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 22_multiple_selectors_index_and_slice_overlapping +// Tags: No tags +#[test] +fn test_22_multiple_selectors_index_and_slice_overlapping() { + let q_ast = ::jsonpath_rust_impl::json_query!($[1,0:3]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[1,0:3]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 23_multiple_selectors_duplicate_index +// Tags: No tags +#[test] +fn test_23_multiple_selectors_duplicate_index() { + let q_ast = ::jsonpath_rust_impl::json_query!($[1,1]); + let q_pest = + 
::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[1,1]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 24_multiple_selectors_wildcard_and_index +// Tags: No tags +#[test] +fn test_24_multiple_selectors_wildcard_and_index() { + let q_ast = ::jsonpath_rust_impl::json_query!($[*,1]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[*,1]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 25_multiple_selectors_wildcard_and_name(EDITED: due to macro limitations) +// Tags: No tags +#[test] +fn test_25_multiple_selectors_wildcard_and_name() { + let q_ast = ::jsonpath_rust_impl::json_query!($[*,"a"]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[*,'a']"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 26_multiple_selectors_wildcard_and_slice +// Tags: No tags +#[test] +fn test_26_multiple_selectors_wildcard_and_slice() { + let q_ast = ::jsonpath_rust_impl::json_query!($[*,0:2]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[*,0:2]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 27_multiple_selectors_multiple_wildcards +// Tags: No tags +#[test] +fn test_27_multiple_selectors_multiple_wildcards() { + let q_ast = ::jsonpath_rust_impl::json_query!($[*,*]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[*,*]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 29_descendant_segment_index +// Tags: No tags +#[test] +fn test_29_descendant_segment_index() { + let q_ast = ::jsonpath_rust_impl::json_query!($..[1]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..[1]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 30_descendant_segment_name_shorthand +// Tags: No tags +#[test] +fn test_30_descendant_segment_name_shorthand() { + let q_ast = ::jsonpath_rust_impl::json_query!($..a); + let q_pest = + 
::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..a"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 31_descendant_segment_wildcard_shorthand_array_data +// Tags: No tags +#[test] +fn test_31_descendant_segment_wildcard_shorthand_array_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($..*); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..*"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 32_descendant_segment_wildcard_selector_array_data +// Tags: No tags +#[test] +fn test_32_descendant_segment_wildcard_selector_array_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($..[*]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..[*]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 33_descendant_segment_wildcard_selector_nested_arrays +// Tags: No tags +#[test] +fn test_33_descendant_segment_wildcard_selector_nested_arrays() { + let q_ast = ::jsonpath_rust_impl::json_query!($..[*]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..[*]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 34_descendant_segment_wildcard_selector_nested_objects +// Tags: No tags +#[test] +fn test_34_descendant_segment_wildcard_selector_nested_objects() { + let q_ast = ::jsonpath_rust_impl::json_query!($..[*]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..[*]"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 35_descendant_segment_wildcard_shorthand_object_data +// Tags: No tags +#[test] +fn test_35_descendant_segment_wildcard_shorthand_object_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($..*); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..*"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 36_descendant_segment_wildcard_shorthand_nested_data +// Tags: No tags +#[test] +fn 
test_36_descendant_segment_wildcard_shorthand_nested_data() { + let q_ast = ::jsonpath_rust_impl::json_query!($..*); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..*"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 37_descendant_segment_multiple_selectors(EDITED: due to macro limitations) +// Tags: No tags +#[test] +fn test_37_descendant_segment_multiple_selectors() { + let q_ast = ::jsonpath_rust_impl::json_query!($..["a","d"]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..['a','d']"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} + +// Test case: 38_descendant_segment_object_traversal_multiple_selectors(EDITED: due to macro limitations) +// Tags: No tags +#[test] +fn test_38_descendant_segment_object_traversal_multiple_selectors() { + let q_ast = ::jsonpath_rust_impl::json_query!($..["a","d"]); + let q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$..['a','d']"#).expect("failed to parse"); + assert_eq!(q_pest, q_ast); +} diff --git a/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/compile_but_expect_err.rs b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/compile_but_expect_err.rs new file mode 100644 index 0000000..1c85152 --- /dev/null +++ b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/compile_but_expect_err.rs @@ -0,0 +1,98 @@ +// Test case: 01_no_leading_whitespace +// Tags: whitespace +#[test] +fn test_01_no_leading_whitespace() { + // let q_ast = ::jsonpath_rust_impl::json_query!( $); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#" $"#).expect_err("should not parse"); +} + +// Test case: 02_no_trailing_whitespace +// Tags: whitespace +#[test] +fn test_02_no_trailing_whitespace() { + // let q_ast = ::jsonpath_rust_impl::json_query!($ ); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$ "#).expect_err("should not parse"); +} + +// Test case: 06_name_shorthand_symbol +// Tags: No tags +#[test] +fn 
test_06_name_shorthand_symbol() { + // let q_ast = ::jsonpath_rust_impl::json_query!($.&); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.&"#).expect_err("should not parse"); +} + +// Test case: 07_name_shorthand_number +// Tags: No tags +#[test] +fn test_07_name_shorthand_number() { + // let q_ast = ::jsonpath_rust_impl::json_query!($.1); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.1"#).expect_err("should not parse"); +} + +// Test case: 16_multiple_selectors_space_instead_of_comma +// Tags: whitespace +#[test] +fn test_16_multiple_selectors_space_instead_of_comma() { + // let q_ast = ::jsonpath_rust_impl::json_query!($[0 2]); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[0 2]"#).expect_err("should not parse"); +} + +// Test case: 17_selector_leading_comma +// Tags: No tags +#[test] +fn test_17_selector_leading_comma() { + // let q_ast = ::jsonpath_rust_impl::json_query!($[,0]); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[,0]"#).expect_err("should not parse"); +} + +// Test case: 18_selector_trailing_comma +// Tags: No tags +#[test] +fn test_18_selector_trailing_comma() { + // let q_ast = ::jsonpath_rust_impl::json_query!($[0,]); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[0,]"#).expect_err("should not parse"); +} + +// Test case: 28_empty_segment +// Tags: No tags +#[test] +fn test_28_empty_segment() { + // let q_ast = ::jsonpath_rust_impl::json_query!($[]); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[]"#).expect_err("should not parse"); +} + +// Test case: 39_bald_descendant_segment +// Tags: No tags +#[test] +fn test_39_bald_descendant_segment() { + // let q_ast = ::jsonpath_rust_impl::json_query!($..); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$.."#).expect_err("should not parse"); +} + +// Test case: 40_current_node_identifier_without_filter_selector +// Tags: No tags +#[test] +fn 
test_40_current_node_identifier_without_filter_selector() { + // let q_ast = ::jsonpath_rust_impl::json_query!($[@.a]); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[@.a]"#).expect_err("should not parse"); +} + +// Test case: 41_root_node_identifier_in_brackets_without_filter_selector +// Tags: No tags +#[test] +fn test_41_root_node_identifier_in_brackets_without_filter_selector() { + // let q_ast = ::jsonpath_rust_impl::json_query!($[$.a]); + let _q_pest = + ::jsonpath_ast::ast::Main::try_from_pest_parse(r#"$[$.a]"#).expect_err("should not parse"); +} diff --git a/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/does_not_compile.rs b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/does_not_compile.rs new file mode 100644 index 0000000..167289b --- /dev/null +++ b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/does_not_compile.rs @@ -0,0 +1,67 @@ +// Test case: 01_no_leading_whitespace(DISABLED: due to macro limitations) +// Tags: whitespace +// fn test_01_no_leading_whitespace() { +// ::jsonpath_rust_impl::json_query!( $); +// } + +// Test case: 02_no_trailing_whitespace(DISABLED: due to macro limitations) +// Tags: whitespace +// fn test_02_no_trailing_whitespace() { +// ::jsonpath_rust_impl::json_query!($ ); +// } + +// Test case: 06_name_shorthand_symbol +// Tags: No tags +fn test_06_name_shorthand_symbol() { + ::jsonpath_rust_impl::json_query!($.&); +} + +// Test case: 07_name_shorthand_number +// Tags: No tags +fn test_07_name_shorthand_number() { + ::jsonpath_rust_impl::json_query!($.1); +} + +// Test case: 16_multiple_selectors_space_instead_of_comma +// Tags: whitespace +fn test_16_multiple_selectors_space_instead_of_comma() { + ::jsonpath_rust_impl::json_query!($[0 2]); +} + +// Test case: 17_selector_leading_comma +// Tags: No tags +fn test_17_selector_leading_comma() { + ::jsonpath_rust_impl::json_query!($[,0]); +} + +// Test case: 18_selector_trailing_comma +// Tags: No tags +fn test_18_selector_trailing_comma() { 
+ ::jsonpath_rust_impl::json_query!($[0,]); +} + +// Test case: 28_empty_segment +// Tags: No tags +fn test_28_empty_segment() { + ::jsonpath_rust_impl::json_query!($[]); +} + +// Test case: 39_bald_descendant_segment +// Tags: No tags +fn test_39_bald_descendant_segment() { + ::jsonpath_rust_impl::json_query!($..); +} + +// Test case: 40_current_node_identifier_without_filter_selector +// Tags: No tags +fn test_40_current_node_identifier_without_filter_selector() { + ::jsonpath_rust_impl::json_query!($[@.a]); +} + +// Test case: 41_root_node_identifier_in_brackets_without_filter_selector +// Tags: No tags +fn test_41_root_node_identifier_in_brackets_without_filter_selector() { + ::jsonpath_rust_impl::json_query!($[$.a]); +} + +fn main() {} diff --git a/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/does_not_compile.stderr b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/does_not_compile.stderr new file mode 100644 index 0000000..8e03511 --- /dev/null +++ b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/does_not_compile.stderr @@ -0,0 +1,55 @@ +error: unexpected token + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:16:41 + | +16 | ::jsonpath_rust_impl::json_query!($.&); + | ^ + +error: unexpected token + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:22:41 + | +22 | ::jsonpath_rust_impl::json_query!($.1); + | ^ + +error: unexpected token, expected `]` + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:28:43 + | +28 | ::jsonpath_rust_impl::json_query!($[0 2]); + | ^ + +error: unexpected token + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:34:41 + | +34 | ::jsonpath_rust_impl::json_query!($[,0]); + | ^ + +error: unexpected end of input + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:40:43 + | +40 | ::jsonpath_rust_impl::json_query!($[0,]); + | ^ + +error: unexpected end of input + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:46:41 + | +46 | ::jsonpath_rust_impl::json_query!($[]); 
+ | ^ + +error: unexpected end of input + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:52:5 + | +52 | ::jsonpath_rust_impl::json_query!($..); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `::jsonpath_rust_impl::json_query` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: unexpected token + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:58:41 + | +58 | ::jsonpath_rust_impl::json_query!($[@.a]); + | ^ + +error: unexpected token + --> tests/rfc9535_compile_tests/basic/does_not_compile.rs:64:41 + | +64 | ::jsonpath_rust_impl::json_query!($[$.a]); + | ^ diff --git a/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/mod.rs b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/mod.rs new file mode 100644 index 0000000..31def23 --- /dev/null +++ b/jsonpath-rust-impl/tests/rfc9535_compile_tests/basic/mod.rs @@ -0,0 +1,2 @@ +pub(crate) mod compile_and_passes; +pub(crate) mod compile_but_expect_err; diff --git a/jsonpath-rust-impl/tests/rfc9535_compile_tests/mod.rs b/jsonpath-rust-impl/tests/rfc9535_compile_tests/mod.rs new file mode 100644 index 0000000..f641bc5 --- /dev/null +++ b/jsonpath-rust-impl/tests/rfc9535_compile_tests/mod.rs @@ -0,0 +1 @@ +pub(crate) mod basic; diff --git a/jsonpath-rust-impl/tests/test.rs b/jsonpath-rust-impl/tests/test.rs new file mode 100644 index 0000000..6a418ed --- /dev/null +++ b/jsonpath-rust-impl/tests/test.rs @@ -0,0 +1,69 @@ +#![allow(non_ascii_idents)] + +mod rfc9535_compile_tests; + +#[cfg(test)] +mod tests { + use jsonpath_ast::ast::Main; + use jsonpath_rust_impl::json_query; + + #[test] + fn scratch() { + let q_ast = json_query!($.values[?match(@, $.regex)]).into(); + json_query!( $..[1] ); + json_query!( $[1,::] ); + + assert_eq!( + json_query!( $["a",1] ), + Main::try_from_pest_parse("$['a',1]").expect("failed to parse") + ); + Main::try_from_pest_parse("$['a',1,4]").expect("failed to parse"); + assert_eq!( + json_query!( $ . 
_ ), + Main::try_from_pest_parse("$._").expect("failed to parse") + ); + let _ = json_query!($["a☺a"]); + let _ = json_query!($[0,2]); + let _ = Main::try_from_pest_parse("$ [ 0 , 2]") + .expect("should work"); + let _ = Main::try_from_pest_parse(" $[0,2]").expect_err("failed to parse"); + let _ = Main::try_from_pest_parse("$[0,2] ").expect_err("failed to parse"); + } + + #[test] + fn syn_and_pest_are_equal() { + let q1 = ( + json_query!( $[?@.thing > 4] ), + Main::try_from_pest_parse("$[?@.thing > 4]").expect("failed to parse"), + ); + + assert_eq!( + json_query!( $[?@.thing > 4] ), + Main::try_from_pest_parse("$[?@.thing > 4]").expect("failed to parse") + ); + + // let q2: Main = Main::try_from_pest_parse("$[?@.thing >= 5, ?@.thing <= 6]").expect("failed to parse"); + // let q3: Main = Main::try_from_pest_parse("$[?@.thing >= 5, ?@.thing <= 6.0]").expect("failed to parse"); + // let q4: Main = Main::try_from_pest_parse("$[?@.thing >= 5, ?@.thing == true]").expect("failed to parse"); + // let q5: Main = Main::try_from_pest_parse("$[?@.thing >= 5, ?@.thing != null]").expect("failed to parse"); + + // let q1: Main = json_query!($[?@.thing >= 5]); + // let q2: Main = Main::try_from_pest_parse("$[?@.thing >= 5]").expect("failed to parse"); + + assert_eq!(q1.0, q1.1); + } + + // fn test_☺_() + + /// Common function to run trybuild for all in suite dir + fn trybuild(dir: &str) { + let t = ::trybuild::TestCases::new(); + let fail_path = format!("tests/rfc9535_compile_tests/{}/does_not_compile.rs", dir); + t.compile_fail(fail_path); + } + + #[test] + fn test_rfc_case_basic() { + trybuild("basic"); + } +} diff --git a/rfc9535/Cargo.toml b/rfc9535/Cargo.toml index 0166a95..0edba39 100644 --- a/rfc9535/Cargo.toml +++ b/rfc9535/Cargo.toml @@ -8,7 +8,7 @@ readme = "README.md" [dependencies] -jsonpath-rust = { path = "../" } +jsonpath-rust = { path = "../", features = ["compiled-path"]} serde_json = "1.0" serde = { version = "1.0.217", features = ["derive"] } colored = "2.0" 
diff --git a/rfc9535/test_suite/results.csv b/rfc9535/test_suite/results.csv index 7b686e0..996936c 100644 --- a/rfc9535/test_suite/results.csv +++ b/rfc9535/test_suite/results.csv @@ -1,6 +1,4 @@ Total; Passed; Failed; Date -687; 652; 10; 2025-03-18 22:29:42 -687; 652; 5; 2025-03-18 22:34:48 687; 652; 4; 2025-03-18 22:40:55 687; 652; 2; 2025-03-18 22:41:43 687; 652; 1; 2025-03-18 22:41:53 @@ -10,3 +8,4 @@ Total; Passed; Failed; Date 687; 652; 0; 2025-03-18 22:57:01 687; 652; 0; 2025-05-13 21:21:59 687; 652; 0; 2025-05-19 15:05:31 +687; 652; 0; 2025-05-29 10:41:32 diff --git a/src/parser/grammar/json_path_9535.pest b/src/parser/grammar/json_path_9535.pest index 380fb59..33e5f17 100644 --- a/src/parser/grammar/json_path_9535.pest +++ b/src/parser/grammar/json_path_9535.pest @@ -25,11 +25,12 @@ comp_expr = { comparable ~ S ~ comp_op ~ S ~ comparable } test_expr = {not_op? ~ S ~ test} test = {rel_query | jp_query | function_expr} rel_query = {curr ~ S ~ segments} -function_expr = { function_name ~ "(" ~ S ~ (function_argument ~ (S ~ "," ~ S ~ function_argument)*)? ~ S ~ ")" } -function_name = { function_name_first ~ function_name_char* } -function_name_first = { LCALPHA } -function_name_char = { function_name_first | "_" | DIGIT } -function_argument = { literal | test | logical_expr } +function_expr = { ( function_name_one_arg ~ one_arg ) | ( function_name_two_arg ~ two_arg ) } +function_name_one_arg = { "length" | "value" | "count" } +function_name_two_arg = { "search" | "match" | "in" | "nin" | "none_of" | "any_of" | "subset_of" } +function_argument = { literal | test | logical_expr } +one_arg = _{ "(" ~ S ~ function_argument ~ S ~ ")" } +two_arg = _{ "(" ~ S ~ function_argument ~ S ~ "," ~ S ~ function_argument ~ S ~ ")" } comparable = { literal | singular_query | function_expr } literal = { number | string | bool | null } bool = {"true" | "false"}