Vendor dependencies for 0.3.0 release

commit 82ab7f317b
parent 0c8d39d483
2025-09-27 10:29:08 -05:00
26,803 changed files with 16,134,934 additions and 0 deletions

vendor/paste/src/attr.rs (vendored, new file, 164 lines added)

@@ -0,0 +1,164 @@
use crate::error::Result;
use crate::segment::{self, Segment};
use proc_macro::{Delimiter, Group, Spacing, Span, TokenStream, TokenTree};
use std::iter;
use std::mem;
use std::str::FromStr;
pub fn expand_attr(
attr: TokenStream,
span: Span,
contains_paste: &mut bool,
) -> Result<TokenStream> {
let mut tokens = attr.clone().into_iter();
let mut leading_colons = 0; // $(::)?
let mut leading_path = 0; // $($ident)::+
let mut token;
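// Scan the attribute's leading path (`$(::)? $($ident)::+`) until we reach either
// `= value` (a name-value attribute) or a parenthesized argument list; any other
// shape means there is nothing to paste, so the attribute is returned unchanged.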
let group = loop {
token = tokens.next();
match token {
// colon after `$(:)?`
Some(TokenTree::Punct(ref punct))
if punct.as_char() == ':' && leading_colons < 2 && leading_path == 0 =>
{
leading_colons += 1;
}
// ident after `$(::)? $($ident ::)*`
Some(TokenTree::Ident(_)) if leading_colons != 1 && leading_path % 3 == 0 => {
leading_path += 1;
}
// colon after `$(::)? $($ident ::)* $ident $(:)?`
Some(TokenTree::Punct(ref punct)) if punct.as_char() == ':' && leading_path % 3 > 0 => {
leading_path += 1;
}
// eq+value after `$(::)? $($ident)::+`
Some(TokenTree::Punct(ref punct))
if punct.as_char() == '=' && leading_path % 3 == 1 =>
{
let mut count = 0;
if tokens.inspect(|_| count += 1).all(|tt| is_stringlike(&tt)) && count > 1 {
*contains_paste = true;
let leading = leading_colons + leading_path;
return do_paste_name_value_attr(attr, span, leading);
}
return Ok(attr);
}
// parens after `$(::)? $($ident)::+`
Some(TokenTree::Group(ref group))
if group.delimiter() == Delimiter::Parenthesis && leading_path % 3 == 1 =>
{
break group;
}
// bail out
_ => return Ok(attr),
}
};
// There can't be anything else after the first group in a valid attribute.
if tokens.next().is_some() {
return Ok(attr);
}
let mut group_contains_paste = false;
let mut expanded = TokenStream::new();
let mut nested_attr = TokenStream::new();
for tt in group.stream() {
match &tt {
TokenTree::Punct(punct) if punct.as_char() == ',' => {
expanded.extend(expand_attr(
nested_attr,
group.span(),
&mut group_contains_paste,
)?);
expanded.extend(iter::once(tt));
nested_attr = TokenStream::new();
}
_ => nested_attr.extend(iter::once(tt)),
}
}
if !nested_attr.is_empty() {
expanded.extend(expand_attr(
nested_attr,
group.span(),
&mut group_contains_paste,
)?);
}
if group_contains_paste {
*contains_paste = true;
let mut group = Group::new(Delimiter::Parenthesis, expanded);
group.set_span(span);
Ok(attr
.into_iter()
// Just keep the initial ident in `#[ident(...)]`.
.take(leading_colons + leading_path)
.chain(iter::once(TokenTree::Group(group)))
.collect())
} else {
Ok(attr)
}
}
fn do_paste_name_value_attr(attr: TokenStream, span: Span, leading: usize) -> Result<TokenStream> {
let mut expanded = TokenStream::new();
let mut tokens = attr.into_iter().peekable();
expanded.extend(tokens.by_ref().take(leading + 1)); // `doc =`
let mut segments = segment::parse(&mut tokens)?;
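// Un-quote ordinary `"..."` segments; for raw strings, re-escape the inner text so
// every segment can be embedded in the single quoted literal built below.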
for segment in &mut segments {
if let Segment::String(string) = segment {
if let Some(open_quote) = string.value.find('"') {
if open_quote == 0 {
string.value.truncate(string.value.len() - 1);
string.value.remove(0);
} else {
let begin = open_quote + 1;
let end = string.value.rfind('"').unwrap();
let raw_string = mem::replace(&mut string.value, String::new());
for ch in raw_string[begin..end].chars() {
string.value.extend(ch.escape_default());
}
}
}
}
}
let mut lit = segment::paste(&segments)?;
lit.insert(0, '"');
lit.push('"');
let mut lit = TokenStream::from_str(&lit)
.unwrap()
.into_iter()
.next()
.unwrap();
lit.set_span(span);
expanded.extend(iter::once(lit));
Ok(expanded)
}
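// True if the token can contribute to a pasted string value: an ident, a literal
// other than a byte or char literal, a `'` or alone-spaced `:` punct, or a
// None-delimited group wrapping exactly one such token.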
fn is_stringlike(token: &TokenTree) -> bool {
match token {
TokenTree::Ident(_) => true,
TokenTree::Literal(literal) => {
let repr = literal.to_string();
!repr.starts_with('b') && !repr.starts_with('\'')
}
TokenTree::Group(group) => {
if group.delimiter() != Delimiter::None {
return false;
}
let mut inner = group.stream().into_iter();
match inner.next() {
Some(first) => inner.next().is_none() && is_stringlike(&first),
None => false,
}
}
TokenTree::Punct(punct) => {
punct.as_char() == '\'' || punct.as_char() == ':' && punct.spacing() == Spacing::Alone
}
}
}

vendor/paste/src/error.rs (vendored, new file, 47 lines added)

@@ -0,0 +1,47 @@
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
pub type Result<T> = std::result::Result<T, Error>;
pub struct Error {
begin: Span,
end: Span,
msg: String,
}
impl Error {
pub fn new(span: Span, msg: &str) -> Self {
Self::new2(span, span, msg)
}
pub fn new2(begin: Span, end: Span, msg: &str) -> Self {
Error {
begin,
end,
msg: msg.to_owned(),
}
}
pub fn to_compile_error(&self) -> TokenStream {
// compile_error! { $msg }
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("compile_error", self.begin)),
TokenTree::Punct({
let mut punct = Punct::new('!', Spacing::Alone);
punct.set_span(self.begin);
punct
}),
TokenTree::Group({
let mut group = Group::new(Delimiter::Brace, {
TokenStream::from_iter(vec![TokenTree::Literal({
let mut string = Literal::string(&self.msg);
string.set_span(self.end);
string
})])
});
group.set_span(self.end);
group
}),
])
}
}

vendor/paste/src/lib.rs (vendored, new file, 454 lines added)

@@ -0,0 +1,454 @@
//! [![github]](https://github.com/dtolnay/paste)&ensp;[![crates-io]](https://crates.io/crates/paste)&ensp;[![docs-rs]](https://docs.rs/paste)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
//! The nightly-only [`concat_idents!`] macro in the Rust standard library is
//! notoriously underpowered in that its concatenated identifiers can only refer to
//! existing items; they can never be used to define something new.
//!
//! [`concat_idents!`]: https://doc.rust-lang.org/std/macro.concat_idents.html
//!
//! This crate provides a flexible way to paste together identifiers in a macro,
//! including using pasted identifiers to define new items.
//!
//! This approach works with any Rust compiler 1.31+.
//!
//! <br>
//!
//! # Pasting identifiers
//!
//! Within the `paste!` macro, identifiers inside `[<`...`>]` are pasted
//! together to form a single identifier.
//!
//! ```
//! use paste::paste;
//!
//! paste! {
//! // Defines a const called `QRST`.
//! const [<Q R S T>]: &str = "success!";
//! }
//!
//! fn main() {
//! assert_eq!(
//! paste! { [<Q R S T>].len() },
//! 8,
//! );
//! }
//! ```
//!
//! <br><br>
//!
//! # More elaborate example
//!
//! The next example shows a macro that generates accessor methods for some
//! struct fields. It demonstrates how you might find it useful to bundle a
//! paste invocation inside a macro\_rules macro.
//!
//! ```
//! use paste::paste;
//!
//! macro_rules! make_a_struct_and_getters {
//! ($name:ident { $($field:ident),* }) => {
//! // Define a struct. This expands to:
//! //
//! // pub struct S {
//! // a: String,
//! // b: String,
//! // c: String,
//! // }
//! pub struct $name {
//! $(
//! $field: String,
//! )*
//! }
//!
//! // Build an impl block with getters. This expands to:
//! //
//! // impl S {
//! // pub fn get_a(&self) -> &str { &self.a }
//! // pub fn get_b(&self) -> &str { &self.b }
//! // pub fn get_c(&self) -> &str { &self.c }
//! // }
//! paste! {
//! impl $name {
//! $(
//! pub fn [<get_ $field>](&self) -> &str {
//! &self.$field
//! }
//! )*
//! }
//! }
//! }
//! }
//!
//! make_a_struct_and_getters!(S { a, b, c });
//!
//! fn call_some_getters(s: &S) -> bool {
//! s.get_a() == s.get_b() && s.get_c().is_empty()
//! }
//! #
//! # fn main() {}
//! ```
//!
//! <br><br>
//!
//! # Case conversion
//!
//! Use `$var:lower` or `$var:upper` in the segment list to convert an
//! interpolated segment to lower- or uppercase as part of the paste. For
//! example, `[<ld_ $reg:lower _expr>]` would paste to `ld_bc_expr` if invoked
//! with $reg=`Bc`.
//!
//! Use `$var:snake` to convert CamelCase input to snake\_case.
//! Use `$var:camel` to convert snake\_case to CamelCase.
//! These compose, so for example `$var:snake:upper` would give you SCREAMING\_CASE.
//!
//! The precise Unicode conversions are as defined by [`str::to_lowercase`] and
//! [`str::to_uppercase`].
//!
//! [`str::to_lowercase`]: https://doc.rust-lang.org/std/primitive.str.html#method.to_lowercase
//! [`str::to_uppercase`]: https://doc.rust-lang.org/std/primitive.str.html#method.to_uppercase
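//!
//! For example (an illustrative sketch: the `define_fn!` macro and the `FooBar`
//! name below are arbitrary, not part of this crate's API):
//!
//! ```
//! use paste::paste;
//!
//! macro_rules! define_fn {
//!     ($name:ident) => {
//!         paste! {
//!             // With `FooBar`, `[<do_ $name:snake>]` pastes to `do_foo_bar`.
//!             fn [<do_ $name:snake>]() {}
//!         }
//!     };
//! }
//!
//! define_fn!(FooBar);
//! #
//! # fn main() { do_foo_bar(); }
//! ```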
//!
//! <br>
//!
//! # Pasting documentation strings
//!
//! Within the `paste!` macro, arguments to a #\[doc ...\] attribute are
//! implicitly concatenated together to form a coherent documentation string.
//!
//! ```
//! use paste::paste;
//!
//! macro_rules! method_new {
//! ($ret:ident) => {
//! paste! {
//! #[doc = "Create a new `" $ret "` object."]
//! pub fn new() -> $ret { todo!() }
//! }
//! };
//! }
//!
//! pub struct Paste {}
//!
//! method_new!(Paste); // expands to #[doc = "Create a new `Paste` object."]
//! ```
#![doc(html_root_url = "https://docs.rs/paste/1.0.15")]
#![allow(
clippy::derive_partial_eq_without_eq,
clippy::doc_markdown,
clippy::match_same_arms,
clippy::module_name_repetitions,
clippy::needless_doctest_main,
clippy::too_many_lines
)]
extern crate proc_macro;
mod attr;
mod error;
mod segment;
use crate::attr::expand_attr;
use crate::error::{Error, Result};
use crate::segment::Segment;
use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use std::char;
use std::iter;
use std::panic;
#[proc_macro]
pub fn paste(input: TokenStream) -> TokenStream {
let mut contains_paste = false;
let flatten_single_interpolation = true;
match expand(
input.clone(),
&mut contains_paste,
flatten_single_interpolation,
) {
Ok(expanded) => {
if contains_paste {
expanded
} else {
input
}
}
Err(err) => err.to_compile_error(),
}
}
#[doc(hidden)]
#[proc_macro]
pub fn item(input: TokenStream) -> TokenStream {
paste(input)
}
#[doc(hidden)]
#[proc_macro]
pub fn expr(input: TokenStream) -> TokenStream {
paste(input)
}
fn expand(
input: TokenStream,
contains_paste: &mut bool,
flatten_single_interpolation: bool,
) -> Result<TokenStream> {
let mut expanded = TokenStream::new();
let mut lookbehind = Lookbehind::Other;
let mut prev_none_group = None::<Group>;
let mut tokens = input.into_iter().peekable();
loop {
let token = tokens.next();
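// A None-delimited group (from macro interpolation) is held back for one token:
// if it turns out to be followed by `::`, its contents are flattened so that the
// resulting path still parses.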
if let Some(group) = prev_none_group.take() {
if match (&token, tokens.peek()) {
(Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
}
_ => false,
} {
expanded.extend(group.stream());
*contains_paste = true;
} else {
expanded.extend(iter::once(TokenTree::Group(group)));
}
}
match token {
Some(TokenTree::Group(group)) => {
let delimiter = group.delimiter();
let content = group.stream();
let span = group.span();
if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
let segments = parse_bracket_as_segments(content, span)?;
let pasted = segment::paste(&segments)?;
let tokens = pasted_to_tokens(pasted, span)?;
expanded.extend(tokens);
*contains_paste = true;
} else if flatten_single_interpolation
&& delimiter == Delimiter::None
&& is_single_interpolation_group(&content)
{
expanded.extend(content);
*contains_paste = true;
} else {
let mut group_contains_paste = false;
let is_attribute = delimiter == Delimiter::Bracket
&& (lookbehind == Lookbehind::Pound || lookbehind == Lookbehind::PoundBang);
let mut nested = expand(
content,
&mut group_contains_paste,
flatten_single_interpolation && !is_attribute,
)?;
if is_attribute {
nested = expand_attr(nested, span, &mut group_contains_paste)?;
}
let group = if group_contains_paste {
let mut group = Group::new(delimiter, nested);
group.set_span(span);
*contains_paste = true;
group
} else {
group.clone()
};
if delimiter != Delimiter::None {
expanded.extend(iter::once(TokenTree::Group(group)));
} else if lookbehind == Lookbehind::DoubleColon {
expanded.extend(group.stream());
*contains_paste = true;
} else {
prev_none_group = Some(group);
}
}
lookbehind = Lookbehind::Other;
}
Some(TokenTree::Punct(punct)) => {
lookbehind = match punct.as_char() {
':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
':' if punct.spacing() == Spacing::Joint => Lookbehind::JointColon,
'#' => Lookbehind::Pound,
'!' if lookbehind == Lookbehind::Pound => Lookbehind::PoundBang,
_ => Lookbehind::Other,
};
expanded.extend(iter::once(TokenTree::Punct(punct)));
}
Some(other) => {
lookbehind = Lookbehind::Other;
expanded.extend(iter::once(other));
}
None => return Ok(expanded),
}
}
}
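// Records the most recent punctuation so `expand` can recognize `::` path
// separators and `#[...]` / `#![...]` attributes.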
#[derive(PartialEq)]
enum Lookbehind {
JointColon,
DoubleColon,
Pound,
PoundBang,
Other,
}
// https://github.com/dtolnay/paste/issues/26
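// True if the None-delimited group holds exactly one interpolated fragment: an
// ident path such as `a::b::c`, a literal, or a lifetime.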
fn is_single_interpolation_group(input: &TokenStream) -> bool {
#[derive(PartialEq)]
enum State {
Init,
Ident,
Literal,
Apostrophe,
Lifetime,
Colon1,
Colon2,
}
let mut state = State::Init;
for tt in input.clone() {
state = match (state, &tt) {
(State::Init, TokenTree::Ident(_)) => State::Ident,
(State::Init, TokenTree::Literal(_)) => State::Literal,
(State::Init, TokenTree::Punct(punct)) if punct.as_char() == '\'' => State::Apostrophe,
(State::Apostrophe, TokenTree::Ident(_)) => State::Lifetime,
(State::Ident, TokenTree::Punct(punct))
if punct.as_char() == ':' && punct.spacing() == Spacing::Joint =>
{
State::Colon1
}
(State::Colon1, TokenTree::Punct(punct))
if punct.as_char() == ':' && punct.spacing() == Spacing::Alone =>
{
State::Colon2
}
(State::Colon2, TokenTree::Ident(_)) => State::Ident,
_ => return false,
};
}
state == State::Ident || state == State::Literal || state == State::Lifetime
}
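// True if the bracketed content has the shape of a paste: `<`, at least one
// token, then a closing `>` with nothing after it.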
fn is_paste_operation(input: &TokenStream) -> bool {
let mut tokens = input.clone().into_iter();
match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
_ => return false,
}
let mut has_token = false;
loop {
match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
return has_token && tokens.next().is_none();
}
Some(_) => has_token = true,
None => return false,
}
}
}
fn parse_bracket_as_segments(input: TokenStream, scope: Span) -> Result<Vec<Segment>> {
let mut tokens = input.into_iter().peekable();
match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
Some(wrong) => return Err(Error::new(wrong.span(), "expected `<`")),
None => return Err(Error::new(scope, "expected `[< ... >]`")),
}
let mut segments = segment::parse(&mut tokens)?;
match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {}
Some(wrong) => return Err(Error::new(wrong.span(), "expected `>`")),
None => return Err(Error::new(scope, "expected `[< ... >]`")),
}
if let Some(unexpected) = tokens.next() {
return Err(Error::new(
unexpected.span(),
"unexpected input, expected `[< ... >]`",
));
}
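// Normalize literal segments: decode `'\u{...}'` char literals, reject unsupported
// literals, strip the quotes from string/char/raw-string literals, and map `-` to `_`.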
for segment in &mut segments {
if let Segment::String(string) = segment {
if string.value.starts_with("'\\u{") {
let hex = &string.value[4..string.value.len() - 2];
if let Ok(unsigned) = u32::from_str_radix(hex, 16) {
if let Some(ch) = char::from_u32(unsigned) {
string.value.clear();
string.value.push(ch);
continue;
}
}
}
if string.value.contains(&['#', '\\', '.', '+'][..])
|| string.value.starts_with("b'")
|| string.value.starts_with("b\"")
|| string.value.starts_with("br\"")
{
return Err(Error::new(string.span, "unsupported literal"));
}
let mut range = 0..string.value.len();
if string.value.starts_with("r\"") {
range.start += 2;
range.end -= 1;
} else if string.value.starts_with(&['"', '\''][..]) {
range.start += 1;
range.end -= 1;
}
string.value = string.value[range].replace('-', "_");
}
}
Ok(segments)
}
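// Re-lex the pasted string as a single token: a literal if it starts with an ASCII
// digit, otherwise an identifier (with a leading `'` split back out for lifetimes).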
fn pasted_to_tokens(mut pasted: String, span: Span) -> Result<TokenStream> {
let mut tokens = TokenStream::new();
#[cfg(not(no_literal_fromstr))]
{
use proc_macro::{LexError, Literal};
use std::str::FromStr;
if pasted.starts_with(|ch: char| ch.is_ascii_digit()) {
let literal = match panic::catch_unwind(|| Literal::from_str(&pasted)) {
Ok(Ok(literal)) => TokenTree::Literal(literal),
Ok(Err(LexError { .. })) | Err(_) => {
return Err(Error::new(
span,
&format!("`{:?}` is not a valid literal", pasted),
));
}
};
tokens.extend(iter::once(literal));
return Ok(tokens);
}
}
if pasted.starts_with('\'') {
let mut apostrophe = TokenTree::Punct(Punct::new('\'', Spacing::Joint));
apostrophe.set_span(span);
tokens.extend(iter::once(apostrophe));
pasted.remove(0);
}
let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
Ok(ident) => TokenTree::Ident(ident),
Err(_) => {
return Err(Error::new(
span,
&format!("`{:?}` is not a valid identifier", pasted),
));
}
};
tokens.extend(iter::once(ident));
Ok(tokens)
}

vendor/paste/src/segment.rs (vendored, new file, 233 lines added)

@@ -0,0 +1,233 @@
use crate::error::{Error, Result};
use proc_macro::{token_stream, Delimiter, Ident, Span, TokenTree};
use std::iter::Peekable;
pub(crate) enum Segment {
String(LitStr),
Apostrophe(Span),
Env(LitStr),
Modifier(Colon, Ident),
}
pub(crate) struct LitStr {
pub value: String,
pub span: Span,
}
pub(crate) struct Colon {
pub span: Span,
}
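// Parse tokens up to the closing `>` (or end of input) into segments: idents (raw
// `r#` prefixes stripped), literals, `_`, `'` for lifetimes, `:modifier` markers,
// and `env!("VAR")` expansions; None-delimited groups are flattened recursively.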
pub(crate) fn parse(tokens: &mut Peekable<token_stream::IntoIter>) -> Result<Vec<Segment>> {
let mut segments = Vec::new();
while match tokens.peek() {
None => false,
Some(TokenTree::Punct(punct)) => punct.as_char() != '>',
Some(_) => true,
} {
match tokens.next().unwrap() {
TokenTree::Ident(ident) => {
let mut fragment = ident.to_string();
if fragment.starts_with("r#") {
fragment = fragment.split_off(2);
}
if fragment == "env"
&& match tokens.peek() {
Some(TokenTree::Punct(punct)) => punct.as_char() == '!',
_ => false,
}
{
let bang = tokens.next().unwrap(); // `!`
let expect_group = tokens.next();
let parenthesized = match &expect_group {
Some(TokenTree::Group(group))
if group.delimiter() == Delimiter::Parenthesis =>
{
group
}
Some(wrong) => return Err(Error::new(wrong.span(), "expected `(`")),
None => {
return Err(Error::new2(
ident.span(),
bang.span(),
"expected `(` after `env!`",
));
}
};
let mut inner = parenthesized.stream().into_iter();
let lit = match inner.next() {
Some(TokenTree::Literal(lit)) => lit,
Some(wrong) => {
return Err(Error::new(wrong.span(), "expected string literal"))
}
None => {
return Err(Error::new2(
ident.span(),
parenthesized.span(),
"expected string literal as argument to env! macro",
))
}
};
let lit_string = lit.to_string();
if lit_string.starts_with('"')
&& lit_string.ends_with('"')
&& lit_string.len() >= 2
{
// TODO: maybe handle escape sequences in the string if
// someone has a use case.
segments.push(Segment::Env(LitStr {
value: lit_string[1..lit_string.len() - 1].to_owned(),
span: lit.span(),
}));
} else {
return Err(Error::new(lit.span(), "expected string literal"));
}
if let Some(unexpected) = inner.next() {
return Err(Error::new(
unexpected.span(),
"unexpected token in env! macro",
));
}
} else {
segments.push(Segment::String(LitStr {
value: fragment,
span: ident.span(),
}));
}
}
TokenTree::Literal(lit) => {
segments.push(Segment::String(LitStr {
value: lit.to_string(),
span: lit.span(),
}));
}
TokenTree::Punct(punct) => match punct.as_char() {
'_' => segments.push(Segment::String(LitStr {
value: "_".to_owned(),
span: punct.span(),
})),
'\'' => segments.push(Segment::Apostrophe(punct.span())),
':' => {
let colon_span = punct.span();
let colon = Colon { span: colon_span };
let ident = match tokens.next() {
Some(TokenTree::Ident(ident)) => ident,
wrong => {
let span = wrong.as_ref().map_or(colon_span, TokenTree::span);
return Err(Error::new(span, "expected identifier after `:`"));
}
};
segments.push(Segment::Modifier(colon, ident));
}
_ => return Err(Error::new(punct.span(), "unexpected punct")),
},
TokenTree::Group(group) => {
if group.delimiter() == Delimiter::None {
let mut inner = group.stream().into_iter().peekable();
let nested = parse(&mut inner)?;
if let Some(unexpected) = inner.next() {
return Err(Error::new(unexpected.span(), "unexpected token"));
}
segments.extend(nested);
} else {
return Err(Error::new(group.span(), "unexpected token"));
}
}
}
}
Ok(segments)
}
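// Concatenate the evaluated segments into one string, applying `:lower` / `:upper` /
// `:snake` / `:camel` modifiers to the preceding segment and re-attaching a leading
// `'` for lifetimes.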
pub(crate) fn paste(segments: &[Segment]) -> Result<String> {
let mut evaluated = Vec::new();
let mut is_lifetime = false;
for segment in segments {
match segment {
Segment::String(segment) => {
evaluated.push(segment.value.clone());
}
Segment::Apostrophe(span) => {
if is_lifetime {
return Err(Error::new(*span, "unexpected lifetime"));
}
is_lifetime = true;
}
Segment::Env(var) => {
let resolved = match std::env::var(&var.value) {
Ok(resolved) => resolved,
Err(_) => {
return Err(Error::new(
var.span,
&format!("no such env var: {:?}", var.value),
));
}
};
let resolved = resolved.replace('-', "_");
evaluated.push(resolved);
}
Segment::Modifier(colon, ident) => {
let last = match evaluated.pop() {
Some(last) => last,
None => {
return Err(Error::new2(colon.span, ident.span(), "unexpected modifier"))
}
};
match ident.to_string().as_str() {
"lower" => {
evaluated.push(last.to_lowercase());
}
"upper" => {
evaluated.push(last.to_uppercase());
}
"snake" => {
let mut acc = String::new();
let mut prev = '_';
for ch in last.chars() {
if ch.is_uppercase() && prev != '_' {
acc.push('_');
}
acc.push(ch);
prev = ch;
}
evaluated.push(acc.to_lowercase());
}
"camel" => {
let mut acc = String::new();
let mut prev = '_';
for ch in last.chars() {
if ch != '_' {
if prev == '_' {
for chu in ch.to_uppercase() {
acc.push(chu);
}
} else if prev.is_uppercase() {
for chl in ch.to_lowercase() {
acc.push(chl);
}
} else {
acc.push(ch);
}
}
prev = ch;
}
evaluated.push(acc);
}
_ => {
return Err(Error::new2(
colon.span,
ident.span(),
"unsupported modifier",
));
}
}
}
}
}
let mut pasted = evaluated.into_iter().collect::<String>();
if is_lifetime {
pasted.insert(0, '\'');
}
Ok(pasted)
}