chore: Add rustfmt.toml
stoically committed Oct 24, 2022
1 parent 89f7d45 commit b79d04f
Showing 5 changed files with 37 additions and 22 deletions.
4 changes: 4 additions & 0 deletions rustfmt.toml
@@ -0,0 +1,4 @@
+wrap_comments = true
+format_code_in_doc_comments = true
+imports_granularity = "Crate"
+group_imports = "StdExternalCrate"
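
The first two options are what rewrap the doc comments throughout this diff; the last two are what regroup the imports in `src/node.rs` below. A rough sketch of the import layout those two settings enforce (the `use` items are taken from this diff; treat the snippet as illustrative, not as repository code):

```rust
// `group_imports = "StdExternalCrate"` orders imports in three groups,
// separated by blank lines: std, external crates, then local `crate::` items.
// `imports_granularity = "Crate"` merges all imports from one crate into a
// single `use` statement.

use std::{convert::TryFrom, fmt, ops::Deref};

use proc_macro2::TokenStream;
use quote::ToTokens;

// The third group would hold local imports, e.g.
// `use crate::{punctuation::Dash, Error};`.

fn main() {}
```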
4 changes: 2 additions & 2 deletions src/config.rs
@@ -46,8 +46,8 @@ impl ParserConfig {
/// this case, meaning no tokens can be left in the stream.
///
/// If `None` is returned, parsing happens with the original `ParseStream`,
-/// since the tokens that are passend into the transform callback are a fork,
-/// which gets only advanced if `Some` is returned.
+/// since the tokens that are passend into the transform callback are a
+/// fork, which gets only advanced if `Some` is returned.
///
/// An example usage might be a custom syntax inside blocks which isn't
/// valid Rust. The given example simply translates the `%` character into
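
The doc comment above describes the contract of the block-transform callback: it receives a fork of the block content as a `ParseStream`, returning `Some(TokenStream)` replaces the block, and returning `None` falls back to parsing the original block. A sketch of how that might be wired up, based only on the doc comments in this diff; the `parse2_with_config` name, the `ParserConfig::new()` builder call, and the exact callback signature are assumptions, since those signatures are collapsed here:

```rust
use quote::quote;
use syn::Token;
use syn_rsx::{parse2_with_config, ParserConfig};

fn main() {
    // Rewrite the custom `%` block syntax into a string literal so that
    // `{%}` (not valid Rust) can still be parsed as a block node.
    let config = ParserConfig::new().transform_block(|input| {
        input.parse::<Token![%]>()?;
        Ok(Some(quote! { "percent" }))
    });

    let nodes = parse2_with_config(quote! { <div>{%}</div> }, config).unwrap();
    assert_eq!(nodes.len(), 1);
}
```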
26 changes: 15 additions & 11 deletions src/lib.rs
@@ -1,6 +1,6 @@
-//! [`syn`]-powered parser for JSX-like [`TokenStream`]s, aka RSX. The parsed result is a
-//! nested [`Node`] structure, similar to the browser DOM, where node name and
-//! value are syn expressions to support building proc macros.
+//! [`syn`]-powered parser for JSX-like [`TokenStream`]s, aka RSX. The parsed
+//! result is a nested [`Node`] structure, similar to the browser DOM, where
+//! node name and value are syn expressions to support building proc macros.
//!
//! ```rust
//! # fn main() -> eyre::Result<()> {
@@ -104,7 +104,8 @@
//! # }).unwrap();
//! ```
//!
-//! - **Attribute values can be any valid syn expression without requiring braces**
+//! - **Attribute values can be any valid syn expression without requiring
+//! braces**
//!
//! ```rust
//! # use quote::quote;
@@ -126,14 +127,15 @@
//!
//! - **Customization**
//!
-//! A [`ParserConfig`] to customize parsing behavior is available, so if you have
-//! slightly different requirements for parsing and it's not yet customizable
-//! feel free to open an issue or pull request to extend the configuration.
+//! A [`ParserConfig`] to customize parsing behavior is available, so if you
+//! have slightly different requirements for parsing and it's not yet
+//! customizable feel free to open an issue or pull request to extend the
+//! configuration.
//!
//! One highlight with regards to customization is the [`transform_block`]
//! configuration, which takes a closure that receives raw block content as
-//! `ParseStream` and lets you optionally convert it to a `TokenStream`. That makes it
-//! possible to have custom syntax in blocks. More details in [#9]
+//! `ParseStream` and lets you optionally convert it to a `TokenStream`. That
+//! makes it possible to have custom syntax in blocks. More details in [#9]
//!
//!
//! [`syn`]: /syn
@@ -180,7 +182,8 @@ pub fn parse(tokens: proc_macro::TokenStream) -> Result<Vec<Node>> {
parser.parse(tokens)
}

-/// Parse the given [`proc-macro::TokenStream`] into a [`Node`] tree with custom [`ParserConfig`].
+/// Parse the given [`proc-macro::TokenStream`] into a [`Node`] tree with custom
+/// [`ParserConfig`].
///
/// [`proc-macro::TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
/// [`Node`]: struct.Node.html
@@ -204,7 +207,8 @@ pub fn parse2(tokens: proc_macro2::TokenStream) -> Result<Vec<Node>> {
parser.parse2(tokens)
}

-/// Parse the given [`proc-macro2::TokenStream`] into a [`Node`] tree with custom [`ParserConfig`].
+/// Parse the given [`proc-macro2::TokenStream`] into a [`Node`] tree with
+/// custom [`ParserConfig`].
///
/// [`proc-macro2::TokenStream`]: https://docs.rs/proc-macro2/latest/proc_macro2/struct.TokenStream.html
/// [`Node`]: struct.Node.html
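
Both entry points above return the same `Result<Vec<Node>>`; they only differ in whether they accept a `proc_macro` or a `proc_macro2` token stream, so the latter can be exercised from ordinary code and tests. A minimal usage sketch, assuming the crate is consumed as `syn_rsx`:

```rust
use quote::quote;

fn main() {
    // `parse2` accepts a proc_macro2::TokenStream, so it works outside of a
    // proc-macro context, e.g. in unit tests.
    let nodes = syn_rsx::parse2(quote! {
        <div class="warning">
            <p>{message}</p>
        </div>
    })
    .unwrap();

    // One root element; its attribute and child nodes hang off of it.
    assert_eq!(nodes.len(), 1);
}
```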
3 changes: 2 additions & 1 deletion src/node.rs
@@ -1,8 +1,9 @@
//! Tree of nodes.
+use std::{convert::TryFrom, fmt, ops::Deref};
+
use proc_macro2::TokenStream;
use quote::ToTokens;
-use std::{convert::TryFrom, fmt, ops::Deref};
use syn::{punctuated::Punctuated, token::Colon, Expr, ExprBlock, ExprLit, ExprPath, Ident, Lit};

use crate::{punctuation::Dash, Error};
22 changes: 14 additions & 8 deletions src/parser.rs
@@ -60,7 +60,8 @@ impl Parser {

/// Parse the next [`Node`] in the tree.
///
-/// To improve performance it peeks the next 1-3 tokens and calls the according node parser function depening on that.
+/// To improve performance it peeks the next 1-3 tokens and calls the
+/// according node parser function depening on that.
fn node(&self, input: ParseStream) -> Result<Vec<Node>> {
let mut node = if input.peek(Token![<]) {
if input.peek2(Token![!]) {
@@ -115,7 +116,9 @@ impl Parser {
Ok(Node::Block(NodeBlock { value }))
}

-/// Replace the next [`TokenTree::Group`] in the given parse stream with a token stream returned by a user callback, or parse as original block if no token stream is returned.
+/// Replace the next [`TokenTree::Group`] in the given parse stream with a
+/// token stream returned by a user callback, or parse as original block if
+/// no token stream is returned.
fn block_transform(&self, input: ParseStream, transform_fn: &TransformBlockFn) -> Result<Expr> {
let parser = move |block_content: ParseStream| {
let forked_block_content = block_content.fork();
@@ -209,7 +212,8 @@ impl Parser {
}))
}

-/// Check whether the next token in the stream is a closing tag to decide whether the node element has children.
+/// Check whether the next token in the stream is a closing tag to decide
+/// whether the node element has children.
fn element_has_children(&self, tag_open_name: &NodeName, input: ParseStream) -> Result<bool> {
// An empty input at this point means the tag wasn't closed.
if input.is_empty() {
@@ -224,15 +228,17 @@
// If the next token is a matching close tag then there are no child nodes.
return Ok(false);
} else {
-// If the next token is a closing tag with a different name it's an invalid tree.
+// If the next token is a closing tag with a different name it's an invalid
+// tree.
return Err(input.error("close tag has no corresponding open tag"));
}
}

Ok(true)
}
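
For illustration, the `else` branch above is what rejects a close tag whose name does not match the element currently being parsed; the mismatch becomes an error rather than a child node. A sketch of that failure case, again assuming the crate is consumed as `syn_rsx`:

```rust
use quote::quote;

fn main() {
    // `</span>` closes a tag that `<div>` never opened, so parsing fails with
    // "close tag has no corresponding open tag".
    let result = syn_rsx::parse2(quote! { <div></span> });
    assert!(result.is_err());
}
```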

-/// Parse the stream as opening or self-closing tag and extract its attributes.
+/// Parse the stream as opening or self-closing tag and extract its
+/// attributes.
fn tag_open(&self, input: ParseStream) -> Result<(NodeName, Vec<Node>, bool)> {
input.parse::<Token![<]>()?;
let name = self.node_name(input)?;
@@ -448,9 +454,9 @@ impl Parser {

/// Parse the stream as punctuated idents.
///
-/// We can't replace this with [`Punctuated::parse_separated_nonempty`] since
-/// that doesn't support reserved keywords. Might be worth to consider a PR
-/// upstream.
+/// We can't replace this with [`Punctuated::parse_separated_nonempty`]
+/// since that doesn't support reserved keywords. Might be worth to
+/// consider a PR upstream.
///
/// [`Punctuated::parse_separated_nonempty`]: https://docs.rs/syn/1.0.58/syn/punctuated/struct.Punctuated.html#method.parse_separated_nonempty
fn node_name_punctuated_ident<T: Parse, F: Peek, X: From<Ident>>(
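
The limitation called out in the doc comment above is that `Punctuated::parse_separated_nonempty` goes through `Ident`'s default parser, which rejects reserved keywords, so a dashed node name such as `type-ahead` could not be parsed with it. A simplified sketch of the keyword-tolerant alternative using `Ident::parse_any`; the crate's real helper is generic and uses its own `Dash` punctuation type, so this is a stand-in, not the actual implementation:

```rust
use syn::{
    ext::IdentExt,
    parse::{ParseStream, Parser},
    punctuated::Punctuated,
    Ident, Result, Token,
};

/// Parse dash-separated idents such as `type-ahead`, accepting reserved
/// keywords (`type`, `for`, ...) that the default `Ident` parser rejects.
fn dash_separated_idents(input: ParseStream) -> Result<Punctuated<Ident, Token![-]>> {
    let mut segments = Punctuated::new();
    // `Ident::parse_any` is the keyword-tolerant parser from `syn::ext::IdentExt`.
    segments.push_value(input.call(Ident::parse_any)?);
    while input.peek(Token![-]) {
        segments.push_punct(input.parse()?);
        segments.push_value(input.call(Ident::parse_any)?);
    }
    Ok(segments)
}

fn main() -> Result<()> {
    let name = dash_separated_idents.parse_str("type-ahead")?;
    assert_eq!(name.len(), 2);
    Ok(())
}
```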
