}, [other context]};
+ /// ```
+ ///
+ /// Then one can set `macro_call_pattern` with following arguments:
+ /// ```no_compile
+ /// config.macro_call_pattern(quote!(html! // macro name currently is not checked
+ /// {ident, ident, // can use real idents, or any other
+ /// [/* can ignore contents of auxiliary groups */],
+ /// {%%}, // important part
+ /// []
+ /// }))
+ /// ```
+ ///
+ /// And rstml will do the rest for you.
+ ///
+ /// Panics if no `%%` token was found.
+ ///
+ /// If `macro_call_pattern` is set, rstml will parse the input twice in order
+ /// to recover spaces in `RawText`. Rstml will panic if the macro source text
+ /// cannot be recovered.
+ #[cfg(feature = "rawtext-stable-hack")]
+ pub fn macro_call_pattern(mut self, pattern: TokenStream) -> Self {
+ self.macro_pattern =
+ MacroPattern::from_token_stream(pattern).expect("No %% token found in pattern.");
+ self
+ }
+
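For orientation, a minimal sketch of how a macro crate might set this option; the `html` macro name and the single `{%%}` group are illustrative, not taken from this diff, and the `rawtext-stable-hack` feature must be enabled:

```rust
use quote::quote;
use rstml::ParserConfig;

// Sketch only: the pattern should mirror the real macro call shape; here the
// hypothetical macro is invoked as `html! { ... }`, so `%%` marks its only group.
fn config_with_pattern() -> ParserConfig {
    ParserConfig::new().macro_call_pattern(quote!(html! {%%}))
}
```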
/// Enables parsing for [`Node::Custom`] using a type implementing
/// [`CustomNode`].
pub fn custom_node<CN: CustomNode>(self) -> ParserConfig<CN> {
@@ -221,6 +291,8 @@ impl ParserConfig {
always_self_closed_elements: self.always_self_closed_elements,
raw_text_elements: self.raw_text_elements,
element_close_wildcard: self.element_close_wildcard,
+ #[cfg(feature = "rawtext-stable-hack")]
+ macro_pattern: self.macro_pattern,
custom_node: Default::default(),
}
}
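A small sketch of what this re-typing looks like from the caller's side; `Infallible` is the default custom-node type, so the call below is a no-op re-typing (with a real custom node one would write `.custom_node::<MyNode>()`):

```rust
use rstml::{Infallible, ParserConfig};

// All other builder settings are carried over field by field, as in the
// constructor above; only the custom-node type parameter changes.
fn retyped_config() -> ParserConfig<Infallible> {
    ParserConfig::new().custom_node::<Infallible>()
}
```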
diff --git a/src/lib.rs b/src/lib.rs
index 37eca6b..a748dd1 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -231,9 +231,13 @@ mod config;
mod error;
pub mod node;
mod parser;
+#[doc(hidden)] // Currently its api is not planned to be stable.
+#[cfg(feature = "rawtext-stable-hack-module")]
+pub mod rawtext_stable_hack;
+pub mod visitor;
pub use config::ParserConfig;
pub use error::Error;
-pub use node::atoms;
+pub use node::{atoms, Infallible};
use node::{CustomNode, Node};
pub use parser::{recoverable, recoverable::ParsingResult, Parser};
@@ -252,7 +256,7 @@ pub fn parse(tokens: proc_macro::TokenStream) -> Result<Vec<Node>> {
/// [`Node`]: struct.Node.html
/// [`ParserConfig`]: struct.ParserConfig.html
#[deprecated(since = "0.10.2", note = "use rstml::Parser::parse_simple instead")]
-pub fn parse_with_config<C: CustomNode>(
+pub fn parse_with_config<C: CustomNode>(
tokens: proc_macro::TokenStream,
config: ParserConfig<C>,
) -> Result<Vec<Node<C>>> {
@@ -273,7 +277,7 @@ pub fn parse2(tokens: proc_macro2::TokenStream) -> Result<Vec<Node>> {
/// [`Node`]: struct.Node.html
/// [`ParserConfig`]: struct.ParserConfig.html
#[deprecated(since = "0.10.2", note = "use rstml::Parser::parse_simple instead")]
-pub fn parse2_with_config<C: CustomNode>(
+pub fn parse2_with_config<C: CustomNode>(
tokens: proc_macro2::TokenStream,
config: ParserConfig<C>,
) -> Result<Vec<Node<C>>> {
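As the deprecation notes suggest, callers can go through `Parser` directly; a hedged sketch of the replacement call (markup and error handling are illustrative):

```rust
use quote::quote;
use rstml::{node::Node, Parser, ParserConfig};

// Equivalent of the deprecated `parse2_with_config(tokens, config)` call:
// build a `Parser` from the config and use `parse_simple` on the tokens.
fn parse_nodes() -> Vec<Node> {
    Parser::new(ParserConfig::new())
        .parse_simple(quote! { <span /> })
        .unwrap_or_default()
}
```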
diff --git a/src/node/atoms.rs b/src/node/atoms.rs
index 9b385e9..a70c445 100644
--- a/src/node/atoms.rs
+++ b/src/node/atoms.rs
@@ -43,7 +43,7 @@ pub(crate) mod tokens {
// //
/// Start part of doctype tag
/// ``
- #[derive(Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
+ #[derive(Eq, PartialEq, Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
pub struct ComEnd {
#[parse(parse::parse_array_of2_tokens)]
#[to_tokens(parse::to_tokens_array)]
@@ -72,7 +72,7 @@ pub(crate) mod tokens {
/// End part of element's open tag
/// `/>` or `>`
- #[derive(Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
+ #[derive(Eq, PartialEq, Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
pub struct OpenTagEnd {
pub token_solidus: Option<Token![/]>,
pub token_gt: Token![>],
@@ -81,7 +81,7 @@ pub(crate) mod tokens {
/// Start part of element's close tag.
/// It's commonly used as a separator
/// `</`
- #[derive(Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
+ #[derive(Eq, PartialEq, Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
pub struct CloseTagStart {
pub token_lt: Token![<],
pub token_solidus: Token![/],
@@ -92,7 +92,7 @@ pub use tokens::*;
/// Fragment open part
/// `<>`
-#[derive(Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
+#[derive(Eq, PartialEq, Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
pub struct FragmentOpen {
pub token_lt: Token![<],
pub token_gt: Token![>],
@@ -100,7 +100,7 @@ pub struct FragmentOpen {
/// Fragment close part
/// `</>`
-#[derive(Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
+#[derive(Eq, PartialEq, Clone, Debug, syn_derive::Parse, syn_derive::ToTokens)]
pub struct FragmentClose {
pub start_tag: tokens::CloseTagStart,
pub token_gt: Token![>],
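The added `Eq`/`PartialEq` derives make the parsed atoms directly comparable, e.g. in tests; a trivial sketch:

```rust
use rstml::atoms::FragmentOpen;

// With `PartialEq` derived, two independently parsed `<>` atoms can be
// compared structurally instead of comparing their token streams.
fn same_fragment_open(a: &FragmentOpen, b: &FragmentOpen) -> bool {
    a == b
}
```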
diff --git a/src/node/mod.rs b/src/node/mod.rs
index ef57214..f589302 100644
--- a/src/node/mod.rs
+++ b/src/node/mod.rs
@@ -1,9 +1,10 @@
//! Tree of nodes.
-use std::{convert::Infallible, fmt};
+use std::{convert, fmt};
use atoms::{tokens, FragmentClose, FragmentOpen};
use proc_macro2::{Ident, TokenStream};
+use quote::ToTokens;
use syn::{parse::ParseStream, ExprPath, LitStr, Token};
pub mod atoms;
@@ -18,10 +19,10 @@ pub use attribute::{
AttributeValueExpr, FnBinding, KeyedAttribute, KeyedAttributeValue, NodeAttribute,
};
pub use node_name::{NodeName, NodeNameFragment};
-pub use node_value::NodeBlock;
+pub use node_value::{InvalidBlock, NodeBlock};
pub use self::raw_text::RawText;
-use crate::recoverable::RecoverableContext;
+use crate::recoverable::{ParseRecoverable, RecoverableContext};
/// Node types.
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -56,17 +57,32 @@ impl fmt::Display for NodeType {
}
/// Node in the tree.
-#[derive(Clone, Debug, syn_derive::ToTokens)]
-pub enum Node<C = Infallible> {
+#[derive(Clone, Debug)]
+pub enum Node<C = Infallible> {
Comment(NodeComment),
Doctype(NodeDoctype),
Fragment(NodeFragment<C>),
Element(NodeElement<C>),
Block(NodeBlock),
Text(NodeText),
- RawText(RawText),
+ RawText(RawText<C>),
Custom(C),
}
+// Manual implementation, because derive macro doesn't support generics.
+impl<C: CustomNode> ToTokens for Node<C> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ match self {
+ Self::Comment(comment) => comment.to_tokens(tokens),
+ Self::Doctype(doctype) => doctype.to_tokens(tokens),
+ Self::Fragment(fragment) => fragment.to_tokens(tokens),
+ Self::Element(element) => element.to_tokens(tokens),
+ Self::Block(block) => block.to_tokens(tokens),
+ Self::Text(text) => text.to_tokens(tokens),
+ Self::RawText(raw_text) => raw_text.to_tokens(tokens),
+ Self::Custom(custom) => custom.to_tokens(tokens),
+ }
+ }
+}
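With the manual `ToTokens` implementation, a parsed tree still round-trips back into a `TokenStream`; a short sketch (the markup is chosen arbitrarily):

```rust
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use rstml::{node::Node, Parser, ParserConfig};

// Parse some markup and re-emit the tokens each node was built from.
fn roundtrip() -> TokenStream {
    let nodes: Vec<Node> = Parser::new(ParserConfig::new())
        .parse_simple(quote! { <div class="main">"hello"</div> })
        .expect("markup parses");
    nodes.iter().map(|node| node.to_token_stream()).collect()
}
```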
impl<C: CustomNode> Node<C> {
pub fn flatten(mut self) -> Vec<Self> {
@@ -118,13 +134,24 @@ impl Node {
///
/// A HTMLElement tag, with optional children and attributes.
/// Potentially selfclosing. Any tag name is valid.
-#[derive(Clone, Debug, syn_derive::ToTokens)]
-pub struct NodeElement<C = Infallible> {
+#[derive(Clone, Debug)]
+pub struct NodeElement<C = Infallible> {
pub open_tag: atoms::OpenTag,
- #[to_tokens(parse::to_tokens_array)]
pub children: Vec<Node<C>>,
pub close_tag: Option<atoms::CloseTag>,
}
+// Manual implementation, because derive macro doesn't support generics.
+impl<C: CustomNode> ToTokens for NodeElement<C> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.open_tag.to_tokens(tokens);
+ for child in &self.children {
+ child.to_tokens(tokens);
+ }
+ if let Some(close_tag) = &self.close_tag {
+ close_tag.to_tokens(tokens);
+ }
+ }
+}
impl<C: CustomNode> NodeElement<C> {
pub fn name(&self) -> &NodeName {
@@ -133,6 +160,15 @@ impl NodeElement {
pub fn attributes(&self) -> &[NodeAttribute] {
&self.open_tag.attributes
}
+ pub fn attributes_mut(&mut self) -> &mut Vec<NodeAttribute> {
+ &mut self.open_tag.attributes
+ }
+ pub fn children(&self) -> &[Node<C>] {
+ &self.children
+ }
+ pub fn children_mut(&mut self) -> &mut Vec<Node<C>> {
+ &mut self.children
+ }
}
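A sketch of the kind of in-place rewriting the new `_mut` accessors allow (the `strip_attributes` helper is illustrative):

```rust
use rstml::node::{Node, NodeElement};

// Remove all attributes from an element and from its direct element children.
fn strip_attributes(element: &mut NodeElement) {
    element.attributes_mut().clear();
    for child in element.children_mut() {
        if let Node::Element(child_element) = child {
            child_element.attributes_mut().clear();
        }
    }
}
```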
/// Text node.
@@ -182,16 +218,36 @@ pub struct NodeDoctype {
/// Fragment node.
///
/// Fragment: `<></>`
-#[derive(Clone, Debug, syn_derive::ToTokens)]
-pub struct NodeFragment<C = Infallible> {
+#[derive(Clone, Debug)]
+pub struct NodeFragment<C = Infallible> {
/// Open fragment token
pub tag_open: FragmentOpen,
/// Children of the fragment node.
- #[to_tokens(parse::to_tokens_array)]
pub children: Vec<Node<C>>,
/// Close fragment token
pub tag_close: Option<FragmentClose>,
}
+// Manual implementation, because derive macro doesn't support generics.
+impl<C: CustomNode> ToTokens for NodeFragment<C> {
+ fn to_tokens(&self, tokens: &mut TokenStream) {
+ self.tag_open.to_tokens(tokens);
+ for child in &self.children {
+ child.to_tokens(tokens);
+ }
+ if let Some(close_tag) = &self.tag_close {
+ close_tag.to_tokens(tokens);
+ }
+ }
+}
+
+impl<C: CustomNode> NodeFragment<C> {
+ pub fn children(&self) -> &[Node<C>] {
+ &self.children
+ }
+ pub fn children_mut(&mut self) -> &mut Vec<Node<C>> {
+ &mut self.children
+ }
+}
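And the matching read-only view on fragments; a small sketch that counts element children:

```rust
use rstml::node::{Node, NodeFragment};

// Count how many direct children of a fragment are elements.
fn element_children(fragment: &NodeFragment) -> usize {
    fragment
        .children()
        .iter()
        .filter(|child| matches!(child, Node::Element(_)))
        .count()
}
```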
fn path_to_string(expr: &ExprPath) -> String {
expr.path
@@ -202,34 +258,37 @@ fn path_to_string(expr: &ExprPath) -> String {
.join("::")
}
-pub trait CustomNode: Sized {
- /// Should correspond to [`ToTokens::to_tokens`].
- ///
- /// [`ToTokens::to_tokens`]: quote::ToTokens::to_tokens
- fn to_tokens(&self, tokens: &mut TokenStream);
+pub trait CustomNode: ParseRecoverable + ToTokens {
/// Peeks the token stream to decide whether this node should be parsed.
///
/// Receives a [`ParseStream::fork`].
///
/// [`ParseStream::fork`]: syn::parse::ParseBuffer::fork
fn peek_element(input: ParseStream) -> bool;
- /// Parses the custom node, only called when [`peek_element`] returns
- /// `true`.
- ///
- /// [`peek_element`]: Self::peek_element
- fn parse_element(parser: &mut RecoverableContext, input: ParseStream) -> Option<Self>;
}
-impl CustomNode for Infallible {
- fn to_tokens(&self, _tokens: &mut TokenStream) {
- match *self {}
- }
+/// Newtype for `std::convert::Infallible` used to implement
+/// `ToTokens` for `Infallible`.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub struct Infallible(convert::Infallible);
- fn peek_element(_input: ParseStream) -> bool {
- false
+impl From<convert::Infallible> for Infallible {
+ fn from(s: convert::Infallible) -> Self {
+ match s {}
}
-
- fn parse_element(_parser: &mut RecoverableContext, _input: ParseStream) -> Option<Self> {
+}
+impl ToTokens for Infallible {
+ fn to_tokens(&self, _tokens: &mut TokenStream) {
+ match self.0 {}
+ }
+}
+impl ParseRecoverable for Infallible {
+ fn parse_recoverable(_: &mut RecoverableContext, _: ParseStream) -> Option<Self> {
unreachable!("Infallible::peek_element returns false")
}
}
+impl CustomNode for Infallible {
+ fn peek_element(_: ParseStream) -> bool {
+ false
+ }
+}
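Under the new trait shape, a custom node is just `ParseRecoverable + ToTokens` plus a `peek_element` hook. A minimal sketch; the `IfNode` type, its `#if cond` syntax, and the `rstml::recoverable` import paths are assumptions for illustration, not part of this diff:

```rust
use proc_macro2::TokenStream;
use quote::ToTokens;
use rstml::{
    node::CustomNode,
    recoverable::{ParseRecoverable, RecoverableContext},
    ParserConfig,
};
use syn::{parse::ParseStream, Expr, Token};

// Hypothetical custom node: `#if some_expr`.
#[derive(Clone, Debug)]
struct IfNode {
    pound: Token![#],
    if_token: Token![if],
    condition: Expr,
}

impl ToTokens for IfNode {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.pound.to_tokens(tokens);
        self.if_token.to_tokens(tokens);
        self.condition.to_tokens(tokens);
    }
}

impl ParseRecoverable for IfNode {
    fn parse_recoverable(parser: &mut RecoverableContext, input: ParseStream) -> Option<Self> {
        // `parse_simple` pushes syn errors into the recoverable context.
        Some(Self {
            pound: parser.parse_simple(input)?,
            if_token: parser.parse_simple(input)?,
            condition: parser.parse_simple(input)?,
        })
    }
}

impl CustomNode for IfNode {
    fn peek_element(input: ParseStream) -> bool {
        input.peek(Token![#]) && input.peek2(Token![if])
    }
}

// Enabled through the config builder shown earlier in this diff.
fn config() -> ParserConfig<IfNode> {
    ParserConfig::new().custom_node::<IfNode>()
}
```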
diff --git a/src/node/node_value.rs b/src/node/node_value.rs
index 8671415..aefd6db 100644
--- a/src/node/node_value.rs
+++ b/src/node/node_value.rs
@@ -3,21 +3,25 @@
use std::convert::TryFrom;
use proc_macro2::TokenStream;
-use quote::ToTokens;
use syn::{token::Brace, Block};
+#[derive(Clone, Debug, syn_derive::ToTokens, syn_derive::Parse)]
+pub struct InvalidBlock {
+ #[syn(braced)]
+ brace: Brace,
+ #[syn(in = brace)]
+ body: TokenStream,
+}
+
/// Block node.
///
/// Arbitrary rust code in braced `{}` blocks.
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, syn_derive::ToTokens)]
pub enum NodeBlock {
/// The block value.
ValidBlock(Block),
- Invalid {
- brace: Brace,
- body: TokenStream,
- },
+ Invalid(InvalidBlock),
}
impl NodeBlock {
@@ -40,7 +44,7 @@ impl NodeBlock {
pub fn try_block(&self) -> Option<&Block> {
match self {
Self::ValidBlock(b) => Some(b),
- Self::Invalid { .. } => None,
+ Self::Invalid(_) => None,
}
}
}
@@ -50,21 +54,10 @@ impl TryFrom<NodeBlock> for Block {
fn try_from(v: NodeBlock) -> Result<Self, Self::Error> {
match v {
NodeBlock::ValidBlock(v) => Ok(v),
- NodeBlock::Invalid { .. } => Err(syn::Error::new_spanned(
+ NodeBlock::Invalid(_) => Err(syn::Error::new_spanned(
v,
"Cant parse expression as block.",
)),
}
}
}
-
-impl ToTokens for NodeBlock {
- fn to_tokens(&self, tokens: &mut TokenStream) {
- match self {
- Self::Invalid { brace, body } => {
- brace.surround(tokens, |tokens| body.to_tokens(tokens))
- }
- Self::ValidBlock(b) => b.to_tokens(tokens),
- }
- }
-}
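Downstream code matching on `NodeBlock` changes shape slightly with the tuple variant; a small sketch:

```rust
use rstml::node::NodeBlock;
use syn::Block;

// Previously this arm was written `NodeBlock::Invalid { .. } => None`.
fn block_or_none(block: &NodeBlock) -> Option<&Block> {
    match block {
        NodeBlock::ValidBlock(valid) => Some(valid),
        NodeBlock::Invalid(_) => None,
    }
}
```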
diff --git a/src/node/parse.rs b/src/node/parse.rs
index 860158e..7a23bb8 100644
--- a/src/node/parse.rs
+++ b/src/node/parse.rs
@@ -1,7 +1,7 @@
//!
//! Implementation of ToTokens and Spanned for node related structs
-use proc_macro2::{extra::DelimSpan, Delimiter, TokenStream, TokenTree};
+use proc_macro2::{extra::DelimSpan, Delimiter, TokenStream};
use proc_macro2_diagnostics::{Diagnostic, Level};
use quote::ToTokens;
use syn::{
@@ -38,14 +38,7 @@ impl ParseRecoverable for NodeBlock {
}
Err(e) if parser.config().recover_block => {
parser.push_diagnostic(e);
- let try_block = || {
- let content;
- Ok(NodeBlock::Invalid {
- brace: braced!(content in input),
- body: content.parse()?,
- })
- };
- parser.save_diagnostics(try_block())?
+ NodeBlock::Invalid(parser.parse_simple(input)?)
}
Err(e) => {
parser.push_diagnostic(e);
@@ -69,7 +62,7 @@ impl ParseRecoverable for NodeFragment {
(vec![Node::<C>::RawText(child)], closed_tag)
} else {
let (child, close_tag_start) =
- parser.parse_tokens_until::<Node<C>, _, _>(input, CloseTagStart::parse);
+ parser.parse_tokens_until_call::<Node<C>, _, _>(input, CloseTagStart::parse);
(
child,
FragmentClose::parse_with_start_tag(parser, input, close_tag_start),
@@ -149,7 +142,10 @@ impl ParseRecoverable for OpenTag {
let generics = parser.parse_simple(input)?;
let (attributes, end_tag) = parser
- .parse_tokens_with_ending::(input, tokens::OpenTagEnd::parse);
+ .parse_tokens_with_conflicted_ending::(
+ input,
+ tokens::OpenTagEnd::parse,
+ );
if end_tag.is_none() {
parser.push_diagnostic(Diagnostic::new(Level::Error, "expected end of tag '>'"));
@@ -193,7 +189,7 @@ impl NodeElement {
// invalid closing tags.
// Also parse only part to recover parser as soon as user types
let (children, close_tag) =
- parser.parse_tokens_until::<Node<C>, _, _>(input, CloseTagStart::parse);
+ parser.parse_tokens_until_call::<Node<C>, _, _>(input, CloseTagStart::parse);
let close_tag = CloseTag::parse_with_start_tag(parser, input, close_tag);
@@ -288,7 +284,7 @@ impl ParseRecoverable for NodeElement {
impl<C: CustomNode> ParseRecoverable for Node<C> {
fn parse_recoverable(parser: &mut RecoverableContext, input: ParseStream) -> Option {
let node = if C::peek_element(&input.fork()) {
- Node::Custom(C::parse_element(parser, input)?)
+ Node::Custom(C::parse_recoverable(parser, input)?)
} else if input.peek(Token![<]) {
if input.peek2(Token![!]) {
if input.peek3(Ident) {
@@ -307,7 +303,7 @@ impl ParseRecoverable for Node {
Node::Text(parser.parse_simple(input)?)
} else if !input.is_empty() {
// Parse any input except the start of any other Node
- Node::RawText(parser.parse_simple(input)?)
+ Node::RawText(parser.parse_recoverable(input)?)
} else {
return None;
};
@@ -315,148 +311,6 @@ impl ParseRecoverable for Node {
}
}
-impl RecoverableContext {
- /// Parse array of tokens that are separated by spaces (tabs, or new lines).
- /// Stop parsing array when other branch could parse anything.
- ///
- /// Example:
- /// ```ignore
- /// # use syn::{parse::{Parser, ParseStream}, Ident, Result, parse_macro_input, Token};
- /// # use rstml::{parse_tokens_until};
- /// # fn main() -> syn::Result<()>{
- /// let tokens:proc_macro2::TokenStream = quote::quote!(few idents seperated by spaces and then minus sign - that will stop parsing).into();
- /// let concat_idents_without_minus = |input: ParseStream| -> Result {
- /// let (idents, _minus) = parser.parse_tokens_until::(input, |i|
- /// i.parse::