From ec36cae6c7a4a00dd108ac611108acd6388cfe97 Mon Sep 17 00:00:00 2001 From: holygits Date: Mon, 15 May 2023 11:54:16 +1000 Subject: [PATCH] Remove intermediate macro_rules Support skipping attribute macros in case transformation --- README.md | 4 ++- src/lib.rs | 83 ++++++++++++++++++++++++++------------------ tests/integration.rs | 24 +++++++++++++ 3 files changed, 76 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index 43dfb91..1dea27b 100644 --- a/README.md +++ b/README.md @@ -20,14 +20,16 @@ shouty!(a_b_c); // `A_B_C` Casey macros can operate on `TokenStream`s e.g. ```rust snake!( + #[derive(PartialEq)] struct MockStruct {} impl MockStruct { fn test() -> bool { true } } ); assert!(mock_struct::test()); + assert!(mock_struct::test() == mock_struct::test()); ``` -All `ident` tokens in the stream will have the case transformation applied (keywords will be ignored). +All `ident` tokens in the stream will have the case transformation applied (keywords and attribute macros will be ignored). ### Gotchas Type names, including built-in types are not considered keywords e.g. `bool`, `usize`, `i32` etc. and **will** be transformed by casey. diff --git a/src/lib.rs b/src/lib.rs index ec6700f..d49a953 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,76 +4,91 @@ use traits::{PascalCaseExt, ShoutySnakeCaseExt, SnakeCaseExt}; /// Apply a string transformation (`transform`) to the input `Ident`- /// However, it will not apply the transform to rust keywords. 
-fn transform_non_keyword_ident<Transform>(ident: &Ident, transform: Transform) -> Ident +fn transform_non_keyword_ident<Transform>(ident: &Ident, transform: &Transform) -> Ident where - Transform: FnOnce(String) -> String, + Transform: Fn(String) -> String, { - let is_keyword = syn::parse_str::<syn::Ident>(&ident.to_string()).is_err(); + let ident_value = ident.to_string(); + let is_keyword = syn::parse_str::<syn::Ident>(ident_value.as_str()).is_err(); if is_keyword { ident.clone() } else { - Ident::new(&transform(ident.to_string()), Span::call_site()) + Ident::new(transform(ident_value).as_str(), Span::call_site()) } } -macro_rules! transform_idents_in_stream { - ($stream:ident, $transform:expr) => { - $stream - .into_iter() - .map(|token_tree| match token_tree { - TokenTree::Ident(ref ident) => { - transform_non_keyword_ident(ident, $transform).into() - } - // find all idents in `TokenGroup` apply and reconstruct the group - TokenTree::Group(ref group) => TokenTree::Group(Group::new( - group.delimiter(), - group - .stream() - .into_iter() - .map(|group_token_tree| { - if let TokenTree::Ident(ref ident) = group_token_tree { - transform_non_keyword_ident(ident, $transform).into() +fn transform_idents_in_stream2<Transform>(stream: TokenStream, transform: &Transform) -> TokenStream +where + Transform: Fn(String) -> String, +{ + let mut transformed = TokenStream::new(); + let mut attr_macro_hit = false; + for tt_in in stream { + let tt_out = match tt_in { + TokenTree::Punct(punct) => { + attr_macro_hit = punct.as_char() == '#'; + punct.into() + } + TokenTree::Literal(l) => { + attr_macro_hit = attr_macro_hit && l.to_string() == "["; + l.into() + } + TokenTree::Ident(ref ident) => transform_non_keyword_ident(ident, transform).into(), + // find all idents in `TokenGroup` apply and reconstruct the group + TokenTree::Group(ref group) => TokenTree::Group(Group::new( + group.delimiter(), + group + .stream() + .into_iter() + .map(|group_token_tree| { + if let TokenTree::Ident(ref ident) = group_token_tree { + if attr_macro_hit { 
+ attr_macro_hit = false; + TokenTree::Ident(ident.clone()) } else { - group_token_tree + transform_non_keyword_ident(ident, transform).into() } - }) - .collect::<TokenStream>(), - )), - _ => token_tree, - }) - .collect() - }; + } else { + group_token_tree + } + }) + .collect::<TokenStream>(), + )), + }; + transformed.extend([tt_out]); + } + transformed } /// Expands idents in the input stream as UPPERCASE #[proc_macro] pub fn upper(stream: TokenStream) -> TokenStream { - transform_idents_in_stream!(stream, &|s: String| s.to_uppercase()) + transform_idents_in_stream2(stream, &|s: String| s.to_uppercase()) } /// Expands idents in the input stream as lowercase #[proc_macro] pub fn lower(stream: TokenStream) -> TokenStream { - transform_idents_in_stream!(stream, &|s: String| s.to_lowercase()) + transform_idents_in_stream2(stream, &|s: String| s.to_lowercase()) } /// Expands idents in the input stream as snake_case /// e.g. `HelloWorld` -> `hello_world` #[proc_macro] pub fn snake(stream: TokenStream) -> TokenStream { - transform_idents_in_stream!(stream, &|s: String| s.to_snake_case()) + transform_idents_in_stream2(stream, &|s: String| s.to_snake_case()) } /// Expands idents in the input stream as PascalCase /// e.g. `helloWorld` -> `HelloWorld` #[proc_macro] pub fn pascal(stream: TokenStream) -> TokenStream { - transform_idents_in_stream!(stream, &|s: String| s.to_pascal_case()) + transform_idents_in_stream2(stream, &|s: String| s.to_pascal_case()) } /// Expands idents in the input stream as SHOUTY_CASE /// e.g. 
`HelloWorld` -> `HELLO_WORLD` #[proc_macro] pub fn shouty(stream: TokenStream) -> TokenStream { - transform_idents_in_stream!(stream, &|s: String| s.to_shouty_snake_case()) + transform_idents_in_stream2(stream, &|s: String| s.to_shouty_snake_case()) } diff --git a/tests/integration.rs b/tests/integration.rs index 5d2d05d..db3209f 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -127,3 +127,27 @@ fn declare_struct() { }); assert!(MockStruct::Test()); } + +#[test] +fn skip_attribute_keywords() { + macro_rules! test { + ($($name:ident),*) => { + casey::pascal!( + #[repr(usize)] + #[derive(PartialEq, Debug, Copy, Clone)] + pub enum Test { + $($name,)* + } + ); + } + } + + test! { + one, + two, + three + } + + assert!(Test::One == Test::One); + let _ = Test::One.clone(); +}