Skip to content

Commit

Permalink
Remove intermediate macro_rules
Browse files Browse the repository at this point in the history
Support skipping attribute macros in case transformation
  • Loading branch information
jordy25519 committed May 15, 2023
1 parent c753fa1 commit ec36cae
Show file tree
Hide file tree
Showing 3 changed files with 76 additions and 35 deletions.
4 changes: 3 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,16 @@ shouty!(a_b_c); // `A_B_C`
Casey macros can operate on `TokenStream`s e.g.
```rust
snake!(
#[derive(PartialEq)]
struct MockStruct {}
impl MockStruct {
fn test() -> bool { true }
}
);
assert!(mock_struct::test());
assert!(mock_struct::test() == mock_struct::test())
```
All `ident` tokens in the stream will have the case transformation applied (keywords will be ignored).
All `ident` tokens in the stream will have the case transformation applied (keywords and attribute macros will be ignored).

### Gotchas
Type names, including built-in types are not considered keywords e.g. `bool`, `usize`, `i32` etc. and **will** be transformed by casey.
Expand Down
83 changes: 49 additions & 34 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,76 +4,91 @@ use traits::{PascalCaseExt, ShoutySnakeCaseExt, SnakeCaseExt};

/// Apply a string transformation (`transform`) to the input `Ident`-
/// However, it will not apply the transform to rust keywords.
fn transform_non_keyword_ident<Transform>(ident: &Ident, transform: Transform) -> Ident
fn transform_non_keyword_ident<Transform>(ident: &Ident, transform: &Transform) -> Ident
where
Transform: FnOnce(String) -> String,
Transform: Fn(String) -> String,
{
let is_keyword = syn::parse_str::<syn::Ident>(&ident.to_string()).is_err();
let ident_value = ident.to_string();
let is_keyword = syn::parse_str::<syn::Ident>(ident_value.as_str()).is_err();
if is_keyword {
ident.clone()
} else {
Ident::new(&transform(ident.to_string()), Span::call_site())
Ident::new(transform(ident_value).as_str(), Span::call_site())
}
}

macro_rules! transform_idents_in_stream {
($stream:ident, $transform:expr) => {
$stream
.into_iter()
.map(|token_tree| match token_tree {
TokenTree::Ident(ref ident) => {
transform_non_keyword_ident(ident, $transform).into()
}
// find all idents in `TokenGroup` apply and reconstruct the group
TokenTree::Group(ref group) => TokenTree::Group(Group::new(
group.delimiter(),
group
.stream()
.into_iter()
.map(|group_token_tree| {
if let TokenTree::Ident(ref ident) = group_token_tree {
transform_non_keyword_ident(ident, $transform).into()
fn transform_idents_in_stream2<Transform>(stream: TokenStream, transform: &Transform) -> TokenStream
where
Transform: Fn(String) -> String,
{
let mut transformed = TokenStream::new();
let mut attr_macro_hit = false;
for tt_in in stream {
let tt_out = match tt_in {
TokenTree::Punct(punct) => {
attr_macro_hit = punct.as_char() == '#';
punct.into()
}
TokenTree::Literal(l) => {
attr_macro_hit = attr_macro_hit && l.to_string() == "[";
l.into()
}
TokenTree::Ident(ref ident) => transform_non_keyword_ident(ident, transform).into(),
// find all idents in `TokenGroup` apply and reconstruct the group
TokenTree::Group(ref group) => TokenTree::Group(Group::new(
group.delimiter(),
group
.stream()
.into_iter()
.map(|group_token_tree| {
if let TokenTree::Ident(ref ident) = group_token_tree {
if attr_macro_hit {
attr_macro_hit = false;
TokenTree::Ident(ident.clone())
} else {
group_token_tree
transform_non_keyword_ident(ident, transform).into()
}
})
.collect::<TokenStream>(),
)),
_ => token_tree,
})
.collect()
};
} else {
group_token_tree
}
})
.collect::<TokenStream>(),
)),
};
transformed.extend([tt_out]);
}
transformed
}

/// Expands idents in the input stream as UPPERCASE
#[proc_macro]
pub fn upper(stream: TokenStream) -> TokenStream {
transform_idents_in_stream!(stream, &|s: String| s.to_uppercase())
transform_idents_in_stream2(stream, &|s: String| s.to_uppercase())
}

/// Expands idents in the input stream as lowercase
#[proc_macro]
pub fn lower(stream: TokenStream) -> TokenStream {
transform_idents_in_stream!(stream, &|s: String| s.to_lowercase())
transform_idents_in_stream2(stream, &|s: String| s.to_lowercase())
}

/// Expands idents in the input stream as snake_case
/// e.g. `HelloWorld` -> `hello_world`
#[proc_macro]
pub fn snake(stream: TokenStream) -> TokenStream {
transform_idents_in_stream!(stream, &|s: String| s.to_snake_case())
transform_idents_in_stream2(stream, &|s: String| s.to_snake_case())
}

/// Expands idents in the input stream as PascalCase
/// e.g. `helloWorld` -> `HelloWorld`
#[proc_macro]
pub fn pascal(stream: TokenStream) -> TokenStream {
transform_idents_in_stream!(stream, &|s: String| s.to_pascal_case())
transform_idents_in_stream2(stream, &|s: String| s.to_pascal_case())
}

/// Expands idents in the input stream as SHOUTY_CASE
/// e.g. `HelloWorld` -> `HELLO_WORLD`
#[proc_macro]
pub fn shouty(stream: TokenStream) -> TokenStream {
transform_idents_in_stream!(stream, &|s: String| s.to_shouty_snake_case())
transform_idents_in_stream2(stream, &|s: String| s.to_shouty_snake_case())
}
24 changes: 24 additions & 0 deletions tests/integration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -127,3 +127,27 @@ fn declare_struct() {
});
assert!(MockStruct::Test());
}

// Regression test for this commit: idents that name attribute macros
// (e.g. `repr`, `derive` in `#[...]`) must NOT be case-transformed,
// while ordinary idents in the stream still are.
#[test]
fn skip_attribute_keywords() {
// Local macro so the variant names are fed to `casey::pascal!` as
// `ident` tokens rather than literals.
macro_rules! test {
($($name:ident),*) => {
// `pascal!` should transform `one`/`two`/`three` -> `One`/`Two`/`Three`
// but leave `repr`, `derive`, and the derive arguments untouched;
// otherwise the attributes would fail to resolve and this would not compile.
casey::pascal!(
#[repr(usize)]
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum Test {
$($name,)*
}
);
}
}

// Expand with lowercase variant names; the macro defines `enum Test`
// with PascalCase variants in this function's scope.
test! {
one,
two,
three
}

// `Test::One` existing proves the case transform ran on the variants;
// `==` and `.clone()` prove the derived impls survived (i.e. the
// attribute idents were skipped, so PartialEq/Clone actually derived).
assert!(Test::One == Test::One);
let _ = Test::One.clone();
}

0 comments on commit ec36cae

Please sign in to comment.