Vendor dependencies for 0.3.0 release

2025-09-27 10:29:08 -05:00
parent 0c8d39d483
commit 82ab7f317b
26803 changed files with 16134934 additions and 0 deletions

386
vendor/prettyplease/src/algorithm.rs vendored Normal file

@@ -0,0 +1,386 @@
// Adapted from https://github.com/rust-lang/rust/blob/1.57.0/compiler/rustc_ast_pretty/src/pp.rs.
// See "Algorithm notes" in the crate-level rustdoc.
use crate::ring::RingBuffer;
use crate::{MARGIN, MIN_SPACE};
use std::borrow::Cow;
use std::cmp;
use std::collections::VecDeque;
use std::iter;
#[derive(Clone, Copy, PartialEq)]
pub enum Breaks {
Consistent,
Inconsistent,
}
#[derive(Clone, Copy, Default)]
pub struct BreakToken {
pub offset: isize,
pub blank_space: usize,
pub pre_break: Option<char>,
pub post_break: &'static str,
pub no_break: Option<char>,
pub if_nonempty: bool,
pub never_break: bool,
}
#[derive(Clone, Copy)]
pub struct BeginToken {
pub offset: isize,
pub breaks: Breaks,
}
#[derive(Clone)]
pub enum Token {
String(Cow<'static, str>),
Break(BreakToken),
Begin(BeginToken),
End,
}
#[derive(Copy, Clone)]
enum PrintFrame {
Fits(Breaks),
Broken(usize, Breaks),
}
pub const SIZE_INFINITY: isize = 0xffff;
pub struct Printer {
out: String,
// Number of spaces left on line
space: isize,
// Ring-buffer of tokens and calculated sizes
buf: RingBuffer<BufEntry>,
// Total size of tokens already printed
left_total: isize,
// Total size of tokens enqueued, including printed and not yet printed
right_total: isize,
// Holds the ring-buffer index of the Begin that started the current block,
// possibly with the most recent Break after that Begin (if there is any) on
// top of it. Values are pushed and popped on the back of the queue using it
// like a stack, and elsewhere old values are popped from the front of the
// queue as they become irrelevant due to the primary ring-buffer advancing.
scan_stack: VecDeque<usize>,
// Stack of blocks-in-progress being flushed by print
print_stack: Vec<PrintFrame>,
// Level of indentation of current line
indent: usize,
// Buffered indentation to avoid writing trailing whitespace
pending_indentation: usize,
}
#[derive(Clone)]
struct BufEntry {
token: Token,
size: isize,
}
impl Printer {
pub fn new() -> Self {
Printer {
out: String::new(),
space: MARGIN,
buf: RingBuffer::new(),
left_total: 0,
right_total: 0,
scan_stack: VecDeque::new(),
print_stack: Vec::new(),
indent: 0,
pending_indentation: 0,
}
}
pub fn eof(mut self) -> String {
if !self.scan_stack.is_empty() {
self.check_stack(0);
self.advance_left();
}
self.out
}
pub fn scan_begin(&mut self, token: BeginToken) {
if self.scan_stack.is_empty() {
self.left_total = 1;
self.right_total = 1;
self.buf.clear();
}
let right = self.buf.push(BufEntry {
token: Token::Begin(token),
size: -self.right_total,
});
self.scan_stack.push_back(right);
}
pub fn scan_end(&mut self) {
if self.scan_stack.is_empty() {
self.print_end();
} else {
if !self.buf.is_empty() {
if let Token::Break(break_token) = self.buf.last().token {
if self.buf.len() >= 2 {
if let Token::Begin(_) = self.buf.second_last().token {
self.buf.pop_last();
self.buf.pop_last();
self.scan_stack.pop_back();
self.scan_stack.pop_back();
self.right_total -= break_token.blank_space as isize;
return;
}
}
if break_token.if_nonempty {
self.buf.pop_last();
self.scan_stack.pop_back();
self.right_total -= break_token.blank_space as isize;
}
}
}
let right = self.buf.push(BufEntry {
token: Token::End,
size: -1,
});
self.scan_stack.push_back(right);
}
}
pub fn scan_break(&mut self, token: BreakToken) {
if self.scan_stack.is_empty() {
self.left_total = 1;
self.right_total = 1;
self.buf.clear();
} else {
self.check_stack(0);
}
let right = self.buf.push(BufEntry {
token: Token::Break(token),
size: -self.right_total,
});
self.scan_stack.push_back(right);
self.right_total += token.blank_space as isize;
}
pub fn scan_string(&mut self, string: Cow<'static, str>) {
if self.scan_stack.is_empty() {
self.print_string(string);
} else {
let len = string.len() as isize;
self.buf.push(BufEntry {
token: Token::String(string),
size: len,
});
self.right_total += len;
self.check_stream();
}
}
#[track_caller]
pub fn offset(&mut self, offset: isize) {
match &mut self.buf.last_mut().token {
Token::Break(token) => token.offset += offset,
Token::Begin(_) => {}
Token::String(_) | Token::End => unreachable!(),
}
}
pub fn end_with_max_width(&mut self, max: isize) {
let mut depth = 1;
for &index in self.scan_stack.iter().rev() {
let entry = &self.buf[index];
match entry.token {
Token::Begin(_) => {
depth -= 1;
if depth == 0 {
if entry.size < 0 {
let actual_width = entry.size + self.right_total;
if actual_width > max {
self.buf.push(BufEntry {
token: Token::String(Cow::Borrowed("")),
size: SIZE_INFINITY,
});
self.right_total += SIZE_INFINITY;
}
}
break;
}
}
Token::End => depth += 1,
Token::Break(_) => {}
Token::String(_) => unreachable!(),
}
}
self.scan_end();
}
pub fn ends_with(&self, ch: char) -> bool {
for i in self.buf.index_range().rev() {
if let Token::String(token) = &self.buf[i].token {
return token.ends_with(ch);
}
}
self.out.ends_with(ch)
}
fn check_stream(&mut self) {
while self.right_total - self.left_total > self.space {
if *self.scan_stack.front().unwrap() == self.buf.index_range().start {
self.scan_stack.pop_front().unwrap();
self.buf.first_mut().size = SIZE_INFINITY;
}
self.advance_left();
if self.buf.is_empty() {
break;
}
}
}
fn advance_left(&mut self) {
while self.buf.first().size >= 0 {
let left = self.buf.pop_first();
match left.token {
Token::String(string) => {
self.left_total += left.size;
self.print_string(string);
}
Token::Break(token) => {
self.left_total += token.blank_space as isize;
self.print_break(token, left.size);
}
Token::Begin(token) => self.print_begin(token, left.size),
Token::End => self.print_end(),
}
if self.buf.is_empty() {
break;
}
}
}
fn check_stack(&mut self, mut depth: usize) {
while let Some(&index) = self.scan_stack.back() {
let entry = &mut self.buf[index];
match entry.token {
Token::Begin(_) => {
if depth == 0 {
break;
}
self.scan_stack.pop_back().unwrap();
entry.size += self.right_total;
depth -= 1;
}
Token::End => {
self.scan_stack.pop_back().unwrap();
entry.size = 1;
depth += 1;
}
Token::Break(_) => {
self.scan_stack.pop_back().unwrap();
entry.size += self.right_total;
if depth == 0 {
break;
}
}
Token::String(_) => unreachable!(),
}
}
}
fn get_top(&self) -> PrintFrame {
const OUTER: PrintFrame = PrintFrame::Broken(0, Breaks::Inconsistent);
self.print_stack.last().map_or(OUTER, PrintFrame::clone)
}
fn print_begin(&mut self, token: BeginToken, size: isize) {
if cfg!(prettyplease_debug) {
self.out.push(match token.breaks {
Breaks::Consistent => '«',
Breaks::Inconsistent => '‹',
});
if cfg!(prettyplease_debug_indent) {
self.out
.extend(token.offset.to_string().chars().map(|ch| match ch {
'0'..='9' => ['₀', '₁', '₂', '₃', '₄', '₅', '₆', '₇', '₈', '₉']
[(ch as u8 - b'0') as usize],
'-' => '₋',
_ => unreachable!(),
}));
}
}
if size > self.space {
self.print_stack
.push(PrintFrame::Broken(self.indent, token.breaks));
self.indent = usize::try_from(self.indent as isize + token.offset).unwrap();
} else {
self.print_stack.push(PrintFrame::Fits(token.breaks));
}
}
fn print_end(&mut self) {
let breaks = match self.print_stack.pop().unwrap() {
PrintFrame::Broken(indent, breaks) => {
self.indent = indent;
breaks
}
PrintFrame::Fits(breaks) => breaks,
};
if cfg!(prettyplease_debug) {
self.out.push(match breaks {
Breaks::Consistent => '»',
Breaks::Inconsistent => '›',
});
}
}
fn print_break(&mut self, token: BreakToken, size: isize) {
let fits = token.never_break
|| match self.get_top() {
PrintFrame::Fits(..) => true,
PrintFrame::Broken(.., Breaks::Consistent) => false,
PrintFrame::Broken(.., Breaks::Inconsistent) => size <= self.space,
};
if fits {
self.pending_indentation += token.blank_space;
self.space -= token.blank_space as isize;
if let Some(no_break) = token.no_break {
self.out.push(no_break);
self.space -= no_break.len_utf8() as isize;
}
if cfg!(prettyplease_debug) {
self.out.push('·');
}
} else {
if let Some(pre_break) = token.pre_break {
self.print_indent();
self.out.push(pre_break);
}
if cfg!(prettyplease_debug) {
self.out.push('·');
}
self.out.push('\n');
let indent = self.indent as isize + token.offset;
self.pending_indentation = usize::try_from(indent).unwrap();
self.space = cmp::max(MARGIN - indent, MIN_SPACE);
if !token.post_break.is_empty() {
self.print_indent();
self.out.push_str(token.post_break);
self.space -= token.post_break.len() as isize;
}
}
}
fn print_string(&mut self, string: Cow<'static, str>) {
self.print_indent();
self.out.push_str(&string);
self.space -= string.len() as isize;
}
fn print_indent(&mut self) {
self.out.reserve(self.pending_indentation);
self.out
.extend(iter::repeat(' ').take(self.pending_indentation));
self.pending_indentation = 0;
}
}

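The file above is a streaming, Oppen-style pretty-printer: the scan_* methods enqueue tokens into the ring buffer with provisional sizes, and tokens are printed from the left end once enough lookahead has accumulated to decide whether each group fits in the remaining space. The sketch below is a deliberately simplified, hand-written illustration of just the fits-or-breaks decision (the size > self.space check in print_begin); it is not the crate's API, and it measures a whole group up front rather than streaming through a ring buffer.

// Toy illustration only: if the flat width of a group exceeds the space left on
// the line, break it onto multiple indented lines; otherwise print it flat.
fn render_group(atoms: &[&str], indent: usize, space_left: usize) -> String {
    // Width of the group if printed flat, with single spaces between atoms.
    let flat: usize =
        atoms.iter().map(|a| a.len()).sum::<usize>() + atoms.len().saturating_sub(1);
    if flat <= space_left {
        atoms.join(" ") // the group fits on the current line
    } else {
        let sep = format!("\n{}", " ".repeat(indent));
        atoms.join(sep.as_str()) // break the group, indenting continuation lines
    }
}

fn main() {
    // With 20 columns remaining, the first group fits and the second breaks.
    println!("{}", render_group(&["foo(a,", "b)"], 4, 20));
    println!("{}", render_group(&["frobnicate(first_argument,", "second_argument)"], 4, 20));
}
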
288
vendor/prettyplease/src/attr.rs vendored Normal file

@@ -0,0 +1,288 @@
use crate::algorithm::Printer;
use crate::fixup::FixupContext;
use crate::path::PathKind;
use crate::INDENT;
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use syn::{AttrStyle, Attribute, Expr, Lit, MacroDelimiter, Meta, MetaList, MetaNameValue};
impl Printer {
pub fn outer_attrs(&mut self, attrs: &[Attribute]) {
for attr in attrs {
if let AttrStyle::Outer = attr.style {
self.attr(attr);
}
}
}
pub fn inner_attrs(&mut self, attrs: &[Attribute]) {
for attr in attrs {
if let AttrStyle::Inner(_) = attr.style {
self.attr(attr);
}
}
}
fn attr(&mut self, attr: &Attribute) {
if let Some(mut doc) = value_of_attribute("doc", attr) {
if !doc.contains('\n')
&& match attr.style {
AttrStyle::Outer => !doc.starts_with('/'),
AttrStyle::Inner(_) => true,
}
{
trim_trailing_spaces(&mut doc);
self.word(match attr.style {
AttrStyle::Outer => "///",
AttrStyle::Inner(_) => "//!",
});
self.word(doc);
self.hardbreak();
return;
} else if can_be_block_comment(&doc)
&& match attr.style {
AttrStyle::Outer => !doc.starts_with(&['*', '/'][..]),
AttrStyle::Inner(_) => true,
}
{
trim_interior_trailing_spaces(&mut doc);
self.word(match attr.style {
AttrStyle::Outer => "/**",
AttrStyle::Inner(_) => "/*!",
});
self.word(doc);
self.word("*/");
self.hardbreak();
return;
}
} else if let Some(mut comment) = value_of_attribute("comment", attr) {
if !comment.contains('\n') {
trim_trailing_spaces(&mut comment);
self.word("//");
self.word(comment);
self.hardbreak();
return;
} else if can_be_block_comment(&comment) && !comment.starts_with(&['*', '!'][..]) {
trim_interior_trailing_spaces(&mut comment);
self.word("/*");
self.word(comment);
self.word("*/");
self.hardbreak();
return;
}
}
self.word(match attr.style {
AttrStyle::Outer => "#",
AttrStyle::Inner(_) => "#!",
});
self.word("[");
self.meta(&attr.meta);
self.word("]");
self.space();
}
fn meta(&mut self, meta: &Meta) {
match meta {
Meta::Path(path) => self.path(path, PathKind::Simple),
Meta::List(meta) => self.meta_list(meta),
Meta::NameValue(meta) => self.meta_name_value(meta),
}
}
fn meta_list(&mut self, meta: &MetaList) {
self.path(&meta.path, PathKind::Simple);
let delimiter = match meta.delimiter {
MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
MacroDelimiter::Brace(_) => Delimiter::Brace,
MacroDelimiter::Bracket(_) => Delimiter::Bracket,
};
let group = Group::new(delimiter, meta.tokens.clone());
self.attr_tokens(TokenStream::from(TokenTree::Group(group)));
}
fn meta_name_value(&mut self, meta: &MetaNameValue) {
self.path(&meta.path, PathKind::Simple);
self.word(" = ");
self.expr(&meta.value, FixupContext::NONE);
}
fn attr_tokens(&mut self, tokens: TokenStream) {
let mut stack = Vec::new();
stack.push((tokens.into_iter().peekable(), Delimiter::None));
let mut space = Self::nbsp as fn(&mut Self);
#[derive(PartialEq)]
enum State {
Word,
Punct,
TrailingComma,
}
use State::*;
let mut state = Word;
while let Some((tokens, delimiter)) = stack.last_mut() {
match tokens.next() {
Some(TokenTree::Ident(ident)) => {
if let Word = state {
space(self);
}
self.ident(&ident);
state = Word;
}
Some(TokenTree::Punct(punct)) => {
let ch = punct.as_char();
if let (Word, '=') = (state, ch) {
self.nbsp();
}
if ch == ',' && tokens.peek().is_none() {
self.trailing_comma(true);
state = TrailingComma;
} else {
self.token_punct(ch);
if ch == '=' {
self.nbsp();
} else if ch == ',' {
space(self);
}
state = Punct;
}
}
Some(TokenTree::Literal(literal)) => {
if let Word = state {
space(self);
}
self.token_literal(&literal);
state = Word;
}
Some(TokenTree::Group(group)) => {
let delimiter = group.delimiter();
let stream = group.stream();
match delimiter {
Delimiter::Parenthesis => {
self.word("(");
self.cbox(INDENT);
self.zerobreak();
state = Punct;
}
Delimiter::Brace => {
self.word("{");
state = Punct;
}
Delimiter::Bracket => {
self.word("[");
state = Punct;
}
Delimiter::None => {}
}
stack.push((stream.into_iter().peekable(), delimiter));
space = Self::space;
}
None => {
match delimiter {
Delimiter::Parenthesis => {
if state != TrailingComma {
self.zerobreak();
}
self.offset(-INDENT);
self.end();
self.word(")");
state = Punct;
}
Delimiter::Brace => {
self.word("}");
state = Punct;
}
Delimiter::Bracket => {
self.word("]");
state = Punct;
}
Delimiter::None => {}
}
stack.pop();
if stack.is_empty() {
space = Self::nbsp;
}
}
}
}
}
}
fn value_of_attribute(requested: &str, attr: &Attribute) -> Option<String> {
let value = match &attr.meta {
Meta::NameValue(meta) if meta.path.is_ident(requested) => &meta.value,
_ => return None,
};
let lit = match value {
Expr::Lit(expr) if expr.attrs.is_empty() => &expr.lit,
_ => return None,
};
match lit {
Lit::Str(string) => Some(string.value()),
_ => None,
}
}
pub fn has_outer(attrs: &[Attribute]) -> bool {
for attr in attrs {
if let AttrStyle::Outer = attr.style {
return true;
}
}
false
}
pub fn has_inner(attrs: &[Attribute]) -> bool {
for attr in attrs {
if let AttrStyle::Inner(_) = attr.style {
return true;
}
}
false
}
fn trim_trailing_spaces(doc: &mut String) {
doc.truncate(doc.trim_end_matches(' ').len());
}
fn trim_interior_trailing_spaces(doc: &mut String) {
if !doc.contains(" \n") {
return;
}
let mut trimmed = String::with_capacity(doc.len());
let mut lines = doc.split('\n').peekable();
while let Some(line) = lines.next() {
if lines.peek().is_some() {
trimmed.push_str(line.trim_end_matches(' '));
trimmed.push('\n');
} else {
trimmed.push_str(line);
}
}
*doc = trimmed;
}
fn can_be_block_comment(value: &str) -> bool {
let mut depth = 0usize;
let bytes = value.as_bytes();
let mut i = 0usize;
let upper = bytes.len() - 1;
while i < upper {
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
depth += 1;
i += 2;
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
if depth == 0 {
return false;
}
depth -= 1;
i += 2;
} else {
i += 1;
}
}
depth == 0 && !value.ends_with('/')
}

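The attr printer above exists mainly so that doc attributes produced by macros and codegen round-trip back into readable comment syntax. A hand-written before/after illustration (not taken from the crate's tests) of the single-line doc case handled at the top of attr():

// What codegen typically emits: a #[doc = "..."] attribute on the item.
#[doc = " Returns the answer to everything."]
pub fn answer() -> i32 {
    42
}

// What the printer renders for the same attribute: a doc string with no
// interior newline becomes a /// line comment followed by a hard break.
/// Returns the answer to everything.
pub fn answer_pretty() -> i32 {
    42
}
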
324
vendor/prettyplease/src/classify.rs vendored Normal file

@@ -0,0 +1,324 @@
use proc_macro2::{Delimiter, TokenStream, TokenTree};
use std::ops::ControlFlow;
use syn::punctuated::Punctuated;
use syn::{Expr, MacroDelimiter, Path, PathArguments, ReturnType, Token, Type, TypeParamBound};
pub(crate) fn requires_semi_to_be_stmt(expr: &Expr) -> bool {
match expr {
Expr::Macro(expr) => !matches!(expr.mac.delimiter, MacroDelimiter::Brace(_)),
_ => requires_comma_to_be_match_arm(expr),
}
}
pub(crate) fn requires_comma_to_be_match_arm(mut expr: &Expr) -> bool {
loop {
match expr {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::If(_)
| Expr::Match(_)
| Expr::Block(_) | Expr::Unsafe(_) // both under ExprKind::Block in rustc
| Expr::While(_)
| Expr::Loop(_)
| Expr::ForLoop(_)
| Expr::TryBlock(_)
| Expr::Const(_) => return false,
Expr::Array(_)
| Expr::Assign(_)
| Expr::Async(_)
| Expr::Await(_)
| Expr::Binary(_)
| Expr::Break(_)
| Expr::Call(_)
| Expr::Cast(_)
| Expr::Closure(_)
| Expr::Continue(_)
| Expr::Field(_)
| Expr::Index(_)
| Expr::Infer(_)
| Expr::Let(_)
| Expr::Lit(_)
| Expr::Macro(_)
| Expr::MethodCall(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::Range(_)
| Expr::RawAddr(_)
| Expr::Reference(_)
| Expr::Repeat(_)
| Expr::Return(_)
| Expr::Struct(_)
| Expr::Try(_)
| Expr::Tuple(_)
| Expr::Unary(_)
| Expr::Yield(_)
| Expr::Verbatim(_) => return true,
Expr::Group(group) => expr = &group.expr,
_ => return true,
}
}
}
pub(crate) fn trailing_unparameterized_path(mut ty: &Type) -> bool {
loop {
match ty {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Type::BareFn(t) => match &t.output {
ReturnType::Default => return false,
ReturnType::Type(_, ret) => ty = ret,
},
Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) {
ControlFlow::Break(trailing_path) => return trailing_path,
ControlFlow::Continue(t) => ty = t,
},
Type::Path(t) => match last_type_in_path(&t.path) {
ControlFlow::Break(trailing_path) => return trailing_path,
ControlFlow::Continue(t) => ty = t,
},
Type::Ptr(t) => ty = &t.elem,
Type::Reference(t) => ty = &t.elem,
Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) {
ControlFlow::Break(trailing_path) => return trailing_path,
ControlFlow::Continue(t) => ty = t,
},
Type::Array(_)
| Type::Group(_)
| Type::Infer(_)
| Type::Macro(_)
| Type::Never(_)
| Type::Paren(_)
| Type::Slice(_)
| Type::Tuple(_)
| Type::Verbatim(_) => return false,
_ => return false,
}
}
fn last_type_in_path(path: &Path) -> ControlFlow<bool, &Type> {
match &path.segments.last().unwrap().arguments {
PathArguments::None => ControlFlow::Break(true),
PathArguments::AngleBracketed(_) => ControlFlow::Break(false),
PathArguments::Parenthesized(arg) => match &arg.output {
ReturnType::Default => ControlFlow::Break(false),
ReturnType::Type(_, ret) => ControlFlow::Continue(ret),
},
}
}
fn last_type_in_bounds(
bounds: &Punctuated<TypeParamBound, Token![+]>,
) -> ControlFlow<bool, &Type> {
match bounds.last().unwrap() {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
TypeParamBound::Trait(t) => last_type_in_path(&t.path),
TypeParamBound::Lifetime(_)
| TypeParamBound::PreciseCapture(_)
| TypeParamBound::Verbatim(_) => ControlFlow::Break(false),
_ => ControlFlow::Break(false),
}
}
}
/// Whether the expression's first token is the label of a loop/block.
pub(crate) fn expr_leading_label(mut expr: &Expr) -> bool {
loop {
match expr {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::Block(e) => return e.label.is_some(),
Expr::ForLoop(e) => return e.label.is_some(),
Expr::Loop(e) => return e.label.is_some(),
Expr::While(e) => return e.label.is_some(),
Expr::Assign(e) => expr = &e.left,
Expr::Await(e) => expr = &e.base,
Expr::Binary(e) => expr = &e.left,
Expr::Call(e) => expr = &e.func,
Expr::Cast(e) => expr = &e.expr,
Expr::Field(e) => expr = &e.base,
Expr::Index(e) => expr = &e.expr,
Expr::MethodCall(e) => expr = &e.receiver,
Expr::Range(e) => match &e.start {
Some(start) => expr = start,
None => return false,
},
Expr::Try(e) => expr = &e.expr,
Expr::Array(_)
| Expr::Async(_)
| Expr::Break(_)
| Expr::Closure(_)
| Expr::Const(_)
| Expr::Continue(_)
| Expr::If(_)
| Expr::Infer(_)
| Expr::Let(_)
| Expr::Lit(_)
| Expr::Macro(_)
| Expr::Match(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::RawAddr(_)
| Expr::Reference(_)
| Expr::Repeat(_)
| Expr::Return(_)
| Expr::Struct(_)
| Expr::TryBlock(_)
| Expr::Tuple(_)
| Expr::Unary(_)
| Expr::Unsafe(_)
| Expr::Verbatim(_)
| Expr::Yield(_) => return false,
Expr::Group(e) => {
if !e.attrs.is_empty() {
return false;
}
expr = &e.expr;
}
_ => return false,
}
}
}
/// Whether the expression's last token is `}`.
pub(crate) fn expr_trailing_brace(mut expr: &Expr) -> bool {
loop {
match expr {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::Async(_)
| Expr::Block(_)
| Expr::Const(_)
| Expr::ForLoop(_)
| Expr::If(_)
| Expr::Loop(_)
| Expr::Match(_)
| Expr::Struct(_)
| Expr::TryBlock(_)
| Expr::Unsafe(_)
| Expr::While(_) => return true,
Expr::Assign(e) => expr = &e.right,
Expr::Binary(e) => expr = &e.right,
Expr::Break(e) => match &e.expr {
Some(e) => expr = e,
None => return false,
},
Expr::Cast(e) => return type_trailing_brace(&e.ty),
Expr::Closure(e) => expr = &e.body,
Expr::Group(e) => expr = &e.expr,
Expr::Let(e) => expr = &e.expr,
Expr::Macro(e) => return matches!(e.mac.delimiter, MacroDelimiter::Brace(_)),
Expr::Range(e) => match &e.end {
Some(end) => expr = end,
None => return false,
},
Expr::RawAddr(e) => expr = &e.expr,
Expr::Reference(e) => expr = &e.expr,
Expr::Return(e) => match &e.expr {
Some(e) => expr = e,
None => return false,
},
Expr::Unary(e) => expr = &e.expr,
Expr::Verbatim(e) => return tokens_trailing_brace(e),
Expr::Yield(e) => match &e.expr {
Some(e) => expr = e,
None => return false,
},
Expr::Array(_)
| Expr::Await(_)
| Expr::Call(_)
| Expr::Continue(_)
| Expr::Field(_)
| Expr::Index(_)
| Expr::Infer(_)
| Expr::Lit(_)
| Expr::MethodCall(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::Repeat(_)
| Expr::Try(_)
| Expr::Tuple(_) => return false,
_ => return false,
}
}
fn type_trailing_brace(mut ty: &Type) -> bool {
loop {
match ty {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Type::BareFn(t) => match &t.output {
ReturnType::Default => return false,
ReturnType::Type(_, ret) => ty = ret,
},
Type::ImplTrait(t) => match last_type_in_bounds(&t.bounds) {
ControlFlow::Break(trailing_brace) => return trailing_brace,
ControlFlow::Continue(t) => ty = t,
},
Type::Macro(t) => return matches!(t.mac.delimiter, MacroDelimiter::Brace(_)),
Type::Path(t) => match last_type_in_path(&t.path) {
Some(t) => ty = t,
None => return false,
},
Type::Ptr(t) => ty = &t.elem,
Type::Reference(t) => ty = &t.elem,
Type::TraitObject(t) => match last_type_in_bounds(&t.bounds) {
ControlFlow::Break(trailing_brace) => return trailing_brace,
ControlFlow::Continue(t) => ty = t,
},
Type::Verbatim(t) => return tokens_trailing_brace(t),
Type::Array(_)
| Type::Group(_)
| Type::Infer(_)
| Type::Never(_)
| Type::Paren(_)
| Type::Slice(_)
| Type::Tuple(_) => return false,
_ => return false,
}
}
}
fn last_type_in_path(path: &Path) -> Option<&Type> {
match &path.segments.last().unwrap().arguments {
PathArguments::None | PathArguments::AngleBracketed(_) => None,
PathArguments::Parenthesized(arg) => match &arg.output {
ReturnType::Default => None,
ReturnType::Type(_, ret) => Some(ret),
},
}
}
fn last_type_in_bounds(
bounds: &Punctuated<TypeParamBound, Token![+]>,
) -> ControlFlow<bool, &Type> {
match bounds.last().unwrap() {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
TypeParamBound::Trait(t) => match last_type_in_path(&t.path) {
Some(t) => ControlFlow::Continue(t),
None => ControlFlow::Break(false),
},
TypeParamBound::Lifetime(_) | TypeParamBound::PreciseCapture(_) => {
ControlFlow::Break(false)
}
TypeParamBound::Verbatim(t) => ControlFlow::Break(tokens_trailing_brace(t)),
_ => ControlFlow::Break(false),
}
}
fn tokens_trailing_brace(tokens: &TokenStream) -> bool {
if let Some(TokenTree::Group(last)) = tokens.clone().into_iter().last() {
last.delimiter() == Delimiter::Brace
} else {
false
}
}
}

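These classification helpers feed the parenthesization logic in fixup.rs later in this commit: expressions that already terminate a statement (or a match arm) on their own must be parenthesized when they appear as the leftmost part of a larger statement. A hand-written illustration of the distinction, mirroring the examples in the fixup.rs comments:

fn demo(x: bool) {
    // A match expression is a complete statement by itself, no semicolon needed.
    match x {
        _ => {}
    }
    // As the left operand of a binary operator in statement position it must be
    // parenthesized, otherwise "- 1;" would parse as a separate statement.
    (match x {
        _ => 1,
    }) - 1;
    // In a non-statement position no parentheses are required.
    let _ = match x {
        _ => 1,
    } - 1;
}
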
98
vendor/prettyplease/src/convenience.rs vendored Normal file

@@ -0,0 +1,98 @@
use crate::algorithm::{self, BeginToken, BreakToken, Breaks, Printer};
use std::borrow::Cow;
impl Printer {
pub fn ibox(&mut self, indent: isize) {
self.scan_begin(BeginToken {
offset: indent,
breaks: Breaks::Inconsistent,
});
}
pub fn cbox(&mut self, indent: isize) {
self.scan_begin(BeginToken {
offset: indent,
breaks: Breaks::Consistent,
});
}
pub fn end(&mut self) {
self.scan_end();
}
pub fn word<S: Into<Cow<'static, str>>>(&mut self, wrd: S) {
let s = wrd.into();
self.scan_string(s);
}
fn spaces(&mut self, n: usize) {
self.scan_break(BreakToken {
blank_space: n,
..BreakToken::default()
});
}
pub fn zerobreak(&mut self) {
self.spaces(0);
}
pub fn space(&mut self) {
self.spaces(1);
}
pub fn nbsp(&mut self) {
self.word(" ");
}
pub fn hardbreak(&mut self) {
self.spaces(algorithm::SIZE_INFINITY as usize);
}
pub fn space_if_nonempty(&mut self) {
self.scan_break(BreakToken {
blank_space: 1,
if_nonempty: true,
..BreakToken::default()
});
}
pub fn hardbreak_if_nonempty(&mut self) {
self.scan_break(BreakToken {
blank_space: algorithm::SIZE_INFINITY as usize,
if_nonempty: true,
..BreakToken::default()
});
}
pub fn trailing_comma(&mut self, is_last: bool) {
if is_last {
self.scan_break(BreakToken {
pre_break: Some(','),
..BreakToken::default()
});
} else {
self.word(",");
self.space();
}
}
pub fn trailing_comma_or_space(&mut self, is_last: bool) {
if is_last {
self.scan_break(BreakToken {
blank_space: 1,
pre_break: Some(','),
..BreakToken::default()
});
} else {
self.word(",");
self.space();
}
}
pub fn neverbreak(&mut self) {
self.scan_break(BreakToken {
never_break: true,
..BreakToken::default()
});
}
}

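These wrappers are how the rest of the crate drives the core algorithm: ibox/cbox open inconsistent or consistent groups, word queues literal text, zerobreak/space queue optional breaks, and trailing_comma emits a comma only when its group actually breaks. Below is a minimal sketch of the calling pattern used elsewhere in this commit (for example by fields_unnamed in data.rs); it assumes access to the crate-private Printer type and INDENT constant, so it is illustrative rather than compilable on its own.

// Hypothetical in-crate helper. Prints either "foo(a, b)" on one line, or a
// consistently broken call with one argument per line plus a trailing comma,
// depending on how much width remains.
fn print_call(p: &mut Printer) {
    p.word("foo");
    p.cbox(INDENT);          // consistent box: either no break fires, or all do
    p.word("(");
    p.zerobreak();           // optional break that prints nothing when the call fits
    p.word("a");
    p.trailing_comma(false); // not last: always a comma, then a breakable space
    p.word("b");
    p.trailing_comma(true);  // last: the comma appears only if the box breaks
    p.offset(-INDENT);       // dedent the closing parenthesis when broken
    p.word(")");
    p.end();
}
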
79
vendor/prettyplease/src/data.rs vendored Normal file

@@ -0,0 +1,79 @@
use crate::algorithm::Printer;
use crate::fixup::FixupContext;
use crate::iter::IterDelimited;
use crate::path::PathKind;
use crate::INDENT;
use syn::{Field, Fields, FieldsUnnamed, Variant, VisRestricted, Visibility};
impl Printer {
pub fn variant(&mut self, variant: &Variant) {
self.outer_attrs(&variant.attrs);
self.ident(&variant.ident);
match &variant.fields {
Fields::Named(fields) => {
self.nbsp();
self.word("{");
self.cbox(INDENT);
self.space();
for field in fields.named.iter().delimited() {
self.field(&field);
self.trailing_comma_or_space(field.is_last);
}
self.offset(-INDENT);
self.end();
self.word("}");
}
Fields::Unnamed(fields) => {
self.cbox(INDENT);
self.fields_unnamed(fields);
self.end();
}
Fields::Unit => {}
}
if let Some((_eq_token, discriminant)) = &variant.discriminant {
self.word(" = ");
self.expr(discriminant, FixupContext::NONE);
}
}
pub fn fields_unnamed(&mut self, fields: &FieldsUnnamed) {
self.word("(");
self.zerobreak();
for field in fields.unnamed.iter().delimited() {
self.field(&field);
self.trailing_comma(field.is_last);
}
self.offset(-INDENT);
self.word(")");
}
pub fn field(&mut self, field: &Field) {
self.outer_attrs(&field.attrs);
self.visibility(&field.vis);
if let Some(ident) = &field.ident {
self.ident(ident);
self.word(": ");
}
self.ty(&field.ty);
}
pub fn visibility(&mut self, vis: &Visibility) {
match vis {
Visibility::Public(_) => self.word("pub "),
Visibility::Restricted(vis) => self.vis_restricted(vis),
Visibility::Inherited => {}
}
}
fn vis_restricted(&mut self, vis: &VisRestricted) {
self.word("pub(");
let omit_in = vis.path.get_ident().map_or(false, |ident| {
matches!(ident.to_string().as_str(), "self" | "super" | "crate")
});
if !omit_in {
self.word("in ");
}
self.path(&vis.path, PathKind::Simple);
self.word(") ");
}
}

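Taken together, variant, fields_unnamed and field above give generated enums the familiar shape below; whether a braced variant stays on one line or breaks fully depends on the available width, since it is printed in a consistent box.

pub enum Shape {
    Rect { width: f64, height: f64 },
    Circle(f64),
    Unit,
}
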
1533
vendor/prettyplease/src/expr.rs vendored Normal file

File diff suppressed because it is too large.

17
vendor/prettyplease/src/file.rs vendored Normal file

@@ -0,0 +1,17 @@
use crate::algorithm::Printer;
use syn::File;
impl Printer {
pub fn file(&mut self, file: &File) {
self.cbox(0);
if let Some(shebang) = &file.shebang {
self.word(shebang.clone());
self.hardbreak();
}
self.inner_attrs(&file.attrs);
for item in &file.items {
self.item(item);
}
self.end();
}
}

676
vendor/prettyplease/src/fixup.rs vendored Normal file

@@ -0,0 +1,676 @@
use crate::classify;
use crate::precedence::Precedence;
use syn::{
Expr, ExprBreak, ExprRange, ExprRawAddr, ExprReference, ExprReturn, ExprUnary, ExprYield,
};
#[derive(Copy, Clone)]
pub struct FixupContext {
previous_operator: Precedence,
next_operator: Precedence,
// Print expression such that it can be parsed back as a statement
// consisting of the original expression.
//
// The effect of this is for binary operators in statement position to set
// `leftmost_subexpression_in_stmt` when printing their left-hand operand.
//
// (match x {}) - 1; // match needs parens when LHS of binary operator
//
// match x {}; // not when its own statement
//
stmt: bool,
// This is the difference between:
//
// (match x {}) - 1; // subexpression needs parens
//
// let _ = match x {} - 1; // no parens
//
// There are 3 distinguishable contexts in which `print_expr` might be
// called with the expression `$match` as its argument, where `$match`
// represents an expression of kind `ExprKind::Match`:
//
// - stmt=false leftmost_subexpression_in_stmt=false
//
// Example: `let _ = $match - 1;`
//
// No parentheses required.
//
// - stmt=false leftmost_subexpression_in_stmt=true
//
// Example: `$match - 1;`
//
// Must parenthesize `($match)`, otherwise parsing back the output as a
// statement would terminate the statement after the closing brace of
// the match, parsing `-1;` as a separate statement.
//
// - stmt=true leftmost_subexpression_in_stmt=false
//
// Example: `$match;`
//
// No parentheses required.
leftmost_subexpression_in_stmt: bool,
// Print expression such that it can be parsed as a match arm.
//
// This is almost equivalent to `stmt`, but the grammar diverges a tiny bit
// between statements and match arms when it comes to braced macro calls.
// Macro calls with brace delimiter terminate a statement without a
// semicolon, but do not terminate a match-arm without comma.
//
// m! {} - 1; // two statements: a macro call followed by -1 literal
//
// match () {
// _ => m! {} - 1, // binary subtraction operator
// }
//
match_arm: bool,
// This is almost equivalent to `leftmost_subexpression_in_stmt`, other than
// for braced macro calls.
//
// If we have `m! {} - 1` as an expression, the leftmost subexpression
// `m! {}` will need to be parenthesized in the statement case but not the
// match-arm case.
//
// (m! {}) - 1; // subexpression needs parens
//
// match () {
// _ => m! {} - 1, // no parens
// }
//
leftmost_subexpression_in_match_arm: bool,
// This is the difference between:
//
// if let _ = (Struct {}) {} // needs parens
//
// match () {
// () if let _ = Struct {} => {} // no parens
// }
//
condition: bool,
// This is the difference between:
//
// if break Struct {} == (break) {} // needs parens
//
// if break break == Struct {} {} // no parens
//
rightmost_subexpression_in_condition: bool,
// This is the difference between:
//
// if break ({ x }).field + 1 {} // needs parens
//
// if break 1 + { x }.field {} // no parens
//
leftmost_subexpression_in_optional_operand: bool,
// This is the difference between:
//
// let _ = (return) - 1; // without paren, this would return -1
//
// let _ = return + 1; // no paren because '+' cannot begin expr
//
next_operator_can_begin_expr: bool,
// This is the difference between:
//
// let _ = 1 + return 1; // no parens if rightmost subexpression
//
// let _ = 1 + (return 1) + 1; // needs parens
//
next_operator_can_continue_expr: bool,
// This is the difference between:
//
// let _ = x as u8 + T;
//
// let _ = (x as u8) < T;
//
// Without parens, the latter would want to parse `u8<T...` as a type.
next_operator_can_begin_generics: bool,
}
impl FixupContext {
/// The default amount of fixing is minimal fixing. Fixups should be turned
/// on in a targeted fashion where needed.
pub const NONE: Self = FixupContext {
previous_operator: Precedence::MIN,
next_operator: Precedence::MIN,
stmt: false,
leftmost_subexpression_in_stmt: false,
match_arm: false,
leftmost_subexpression_in_match_arm: false,
condition: false,
rightmost_subexpression_in_condition: false,
leftmost_subexpression_in_optional_operand: false,
next_operator_can_begin_expr: false,
next_operator_can_continue_expr: false,
next_operator_can_begin_generics: false,
};
/// Create the initial fixup for printing an expression in statement
/// position.
pub fn new_stmt() -> Self {
FixupContext {
stmt: true,
..FixupContext::NONE
}
}
/// Create the initial fixup for printing an expression as the right-hand
/// side of a match arm.
pub fn new_match_arm() -> Self {
FixupContext {
match_arm: true,
..FixupContext::NONE
}
}
/// Create the initial fixup for printing an expression as the "condition"
/// of an `if` or `while`. There are a few other positions which are
/// grammatically equivalent and also use this, such as the iterator
/// expression in `for` and the scrutinee in `match`.
pub fn new_condition() -> Self {
FixupContext {
condition: true,
rightmost_subexpression_in_condition: true,
..FixupContext::NONE
}
}
/// Transform this fixup into the one that should apply when printing the
/// leftmost subexpression of the current expression.
///
/// The leftmost subexpression is any subexpression that has the same first
/// token as the current expression, but has a different last token.
///
/// For example in `$a + $b` and `$a.method()`, the subexpression `$a` is a
/// leftmost subexpression.
///
/// Not every expression has a leftmost subexpression. For example neither
/// `-$a` nor `[$a]` have one.
pub fn leftmost_subexpression_with_operator(
self,
expr: &Expr,
next_operator_can_begin_expr: bool,
next_operator_can_begin_generics: bool,
precedence: Precedence,
) -> (Precedence, Self) {
let fixup = FixupContext {
next_operator: precedence,
stmt: false,
leftmost_subexpression_in_stmt: self.stmt || self.leftmost_subexpression_in_stmt,
match_arm: false,
leftmost_subexpression_in_match_arm: self.match_arm
|| self.leftmost_subexpression_in_match_arm,
rightmost_subexpression_in_condition: false,
next_operator_can_begin_expr,
next_operator_can_continue_expr: true,
next_operator_can_begin_generics,
..self
};
(fixup.leftmost_subexpression_precedence(expr), fixup)
}
/// Transform this fixup into the one that should apply when printing a
/// leftmost subexpression followed by a `.` or `?` token, which confer
/// different statement boundary rules compared to other leftmost
/// subexpressions.
pub fn leftmost_subexpression_with_dot(self, expr: &Expr) -> (Precedence, Self) {
let fixup = FixupContext {
next_operator: Precedence::Unambiguous,
stmt: self.stmt || self.leftmost_subexpression_in_stmt,
leftmost_subexpression_in_stmt: false,
match_arm: self.match_arm || self.leftmost_subexpression_in_match_arm,
leftmost_subexpression_in_match_arm: false,
rightmost_subexpression_in_condition: false,
next_operator_can_begin_expr: false,
next_operator_can_continue_expr: true,
next_operator_can_begin_generics: false,
..self
};
(fixup.leftmost_subexpression_precedence(expr), fixup)
}
fn leftmost_subexpression_precedence(self, expr: &Expr) -> Precedence {
if !self.next_operator_can_begin_expr || self.next_operator == Precedence::Range {
if let Scan::Bailout = scan_right(expr, self, Precedence::MIN, 0, 0) {
if scan_left(expr, self) {
return Precedence::Unambiguous;
}
}
}
self.precedence(expr)
}
/// Transform this fixup into the one that should apply when printing the
/// rightmost subexpression of the current expression.
///
/// The rightmost subexpression is any subexpression that has a different
/// first token than the current expression, but has the same last token.
///
/// For example in `$a + $b` and `-$b`, the subexpression `$b` is a
/// rightmost subexpression.
///
/// Not every expression has a rightmost subexpression. For example neither
/// `[$b]` nor `$a.f($b)` have one.
pub fn rightmost_subexpression(
self,
expr: &Expr,
precedence: Precedence,
) -> (Precedence, Self) {
let fixup = self.rightmost_subexpression_fixup(false, false, precedence);
(fixup.rightmost_subexpression_precedence(expr), fixup)
}
pub fn rightmost_subexpression_fixup(
self,
reset_allow_struct: bool,
optional_operand: bool,
precedence: Precedence,
) -> Self {
FixupContext {
previous_operator: precedence,
stmt: false,
leftmost_subexpression_in_stmt: false,
match_arm: false,
leftmost_subexpression_in_match_arm: false,
condition: self.condition && !reset_allow_struct,
leftmost_subexpression_in_optional_operand: self.condition && optional_operand,
..self
}
}
pub fn rightmost_subexpression_precedence(self, expr: &Expr) -> Precedence {
let default_prec = self.precedence(expr);
if match self.previous_operator {
Precedence::Assign | Precedence::Let | Precedence::Prefix => {
default_prec < self.previous_operator
}
_ => default_prec <= self.previous_operator,
} && match self.next_operator {
Precedence::Range | Precedence::Or | Precedence::And => true,
_ => !self.next_operator_can_begin_expr,
} {
if let Scan::Bailout | Scan::Fail = scan_right(expr, self, self.previous_operator, 1, 0)
{
if scan_left(expr, self) {
return Precedence::Prefix;
}
}
}
default_prec
}
/// Determine whether parentheses are needed around the given expression to
/// head off the early termination of a statement or condition.
pub fn parenthesize(self, expr: &Expr) -> bool {
(self.leftmost_subexpression_in_stmt && !classify::requires_semi_to_be_stmt(expr))
|| ((self.stmt || self.leftmost_subexpression_in_stmt) && matches!(expr, Expr::Let(_)))
|| (self.leftmost_subexpression_in_match_arm
&& !classify::requires_comma_to_be_match_arm(expr))
|| (self.condition && matches!(expr, Expr::Struct(_)))
|| (self.rightmost_subexpression_in_condition
&& matches!(
expr,
Expr::Return(ExprReturn { expr: None, .. })
| Expr::Yield(ExprYield { expr: None, .. })
))
|| (self.rightmost_subexpression_in_condition
&& !self.condition
&& matches!(
expr,
Expr::Break(ExprBreak { expr: None, .. })
| Expr::Path(_)
| Expr::Range(ExprRange { end: None, .. })
))
|| (self.leftmost_subexpression_in_optional_operand
&& matches!(expr, Expr::Block(expr) if expr.attrs.is_empty() && expr.label.is_none()))
}
/// Determines the effective precedence of a subexpression. Some expressions
/// have higher or lower precedence when adjacent to particular operators.
fn precedence(self, expr: &Expr) -> Precedence {
if self.next_operator_can_begin_expr {
// Decrease precedence of value-less jumps when followed by an
// operator that would otherwise get interpreted as beginning a
// value for the jump.
if let Expr::Break(ExprBreak { expr: None, .. })
| Expr::Return(ExprReturn { expr: None, .. })
| Expr::Yield(ExprYield { expr: None, .. }) = expr
{
return Precedence::Jump;
}
}
if !self.next_operator_can_continue_expr {
match expr {
// Increase precedence of expressions that extend to the end of
// current statement or group.
Expr::Break(_)
| Expr::Closure(_)
| Expr::Let(_)
| Expr::Return(_)
| Expr::Yield(_) => {
return Precedence::Prefix;
}
Expr::Range(e) if e.start.is_none() => return Precedence::Prefix,
_ => {}
}
}
if self.next_operator_can_begin_generics {
if let Expr::Cast(cast) = expr {
if classify::trailing_unparameterized_path(&cast.ty) {
return Precedence::MIN;
}
}
}
Precedence::of(expr)
}
}
#[derive(Copy, Clone, PartialEq)]
enum Scan {
Fail,
Bailout,
Consume,
}
fn scan_left(expr: &Expr, fixup: FixupContext) -> bool {
match expr {
Expr::Assign(_) => fixup.previous_operator <= Precedence::Assign,
Expr::Binary(e) => match Precedence::of_binop(&e.op) {
Precedence::Assign => fixup.previous_operator <= Precedence::Assign,
binop_prec => fixup.previous_operator < binop_prec,
},
Expr::Cast(_) => fixup.previous_operator < Precedence::Cast,
Expr::Range(e) => e.start.is_none() || fixup.previous_operator < Precedence::Assign,
_ => true,
}
}
fn scan_right(
expr: &Expr,
fixup: FixupContext,
precedence: Precedence,
fail_offset: u8,
bailout_offset: u8,
) -> Scan {
let consume_by_precedence = if match precedence {
Precedence::Assign | Precedence::Compare => precedence <= fixup.next_operator,
_ => precedence < fixup.next_operator,
} || fixup.next_operator == Precedence::MIN
{
Scan::Consume
} else {
Scan::Bailout
};
if fixup.parenthesize(expr) {
return consume_by_precedence;
}
match expr {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::Assign(e) if e.attrs.is_empty() => {
if match fixup.next_operator {
Precedence::Unambiguous => fail_offset >= 2,
_ => bailout_offset >= 1,
} {
return Scan::Consume;
}
let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Assign);
let scan = scan_right(
&e.right,
right_fixup,
Precedence::Assign,
match fixup.next_operator {
Precedence::Unambiguous => fail_offset,
_ => 1,
},
1,
);
if let Scan::Bailout | Scan::Consume = scan {
Scan::Consume
} else if let Precedence::Unambiguous = fixup.next_operator {
Scan::Fail
} else {
Scan::Bailout
}
}
Expr::Binary(e) if e.attrs.is_empty() => {
if match fixup.next_operator {
Precedence::Unambiguous => {
fail_offset >= 2
&& (consume_by_precedence == Scan::Consume || bailout_offset >= 1)
}
_ => bailout_offset >= 1,
} {
return Scan::Consume;
}
let binop_prec = Precedence::of_binop(&e.op);
if binop_prec == Precedence::Compare && fixup.next_operator == Precedence::Compare {
return Scan::Consume;
}
let right_fixup = fixup.rightmost_subexpression_fixup(false, false, binop_prec);
let scan = scan_right(
&e.right,
right_fixup,
binop_prec,
match fixup.next_operator {
Precedence::Unambiguous => fail_offset,
_ => 1,
},
consume_by_precedence as u8 - Scan::Bailout as u8,
);
match scan {
Scan::Fail => {}
Scan::Bailout => return consume_by_precedence,
Scan::Consume => return Scan::Consume,
}
let right_needs_group = binop_prec != Precedence::Assign
&& right_fixup.rightmost_subexpression_precedence(&e.right) <= binop_prec;
if right_needs_group {
consume_by_precedence
} else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) {
Scan::Fail
} else {
Scan::Bailout
}
}
Expr::RawAddr(ExprRawAddr { expr, .. })
| Expr::Reference(ExprReference { expr, .. })
| Expr::Unary(ExprUnary { expr, .. }) => {
if match fixup.next_operator {
Precedence::Unambiguous => {
fail_offset >= 2
&& (consume_by_precedence == Scan::Consume || bailout_offset >= 1)
}
_ => bailout_offset >= 1,
} {
return Scan::Consume;
}
let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Prefix);
let scan = scan_right(
expr,
right_fixup,
precedence,
match fixup.next_operator {
Precedence::Unambiguous => fail_offset,
_ => 1,
},
consume_by_precedence as u8 - Scan::Bailout as u8,
);
match scan {
Scan::Fail => {}
Scan::Bailout => return consume_by_precedence,
Scan::Consume => return Scan::Consume,
}
if right_fixup.rightmost_subexpression_precedence(expr) < Precedence::Prefix {
consume_by_precedence
} else if let (Scan::Fail, Precedence::Unambiguous) = (scan, fixup.next_operator) {
Scan::Fail
} else {
Scan::Bailout
}
}
Expr::Range(e) if e.attrs.is_empty() => match &e.end {
Some(end) => {
if fail_offset >= 2 {
return Scan::Consume;
}
let right_fixup =
fixup.rightmost_subexpression_fixup(false, true, Precedence::Range);
let scan = scan_right(
end,
right_fixup,
Precedence::Range,
fail_offset,
match fixup.next_operator {
Precedence::Assign | Precedence::Range => 0,
_ => 1,
},
);
if match (scan, fixup.next_operator) {
(Scan::Fail, _) => false,
(Scan::Bailout, Precedence::Assign | Precedence::Range) => false,
(Scan::Bailout | Scan::Consume, _) => true,
} {
return Scan::Consume;
}
if right_fixup.rightmost_subexpression_precedence(end) <= Precedence::Range {
Scan::Consume
} else {
Scan::Fail
}
}
None => {
if fixup.next_operator_can_begin_expr {
Scan::Consume
} else {
Scan::Fail
}
}
},
Expr::Break(e) => match &e.expr {
Some(value) => {
if bailout_offset >= 1 || e.label.is_none() && classify::expr_leading_label(value) {
return Scan::Consume;
}
let right_fixup = fixup.rightmost_subexpression_fixup(true, true, Precedence::Jump);
match scan_right(value, right_fixup, Precedence::Jump, 1, 1) {
Scan::Fail => Scan::Bailout,
Scan::Bailout | Scan::Consume => Scan::Consume,
}
}
None => match fixup.next_operator {
Precedence::Assign if precedence > Precedence::Assign => Scan::Fail,
_ => Scan::Consume,
},
},
Expr::Return(ExprReturn { expr, .. }) | Expr::Yield(ExprYield { expr, .. }) => match expr {
Some(e) => {
if bailout_offset >= 1 {
return Scan::Consume;
}
let right_fixup =
fixup.rightmost_subexpression_fixup(true, false, Precedence::Jump);
match scan_right(e, right_fixup, Precedence::Jump, 1, 1) {
Scan::Fail => Scan::Bailout,
Scan::Bailout | Scan::Consume => Scan::Consume,
}
}
None => match fixup.next_operator {
Precedence::Assign if precedence > Precedence::Assign => Scan::Fail,
_ => Scan::Consume,
},
},
Expr::Closure(_) => Scan::Consume,
Expr::Let(e) => {
if bailout_offset >= 1 {
return Scan::Consume;
}
let right_fixup = fixup.rightmost_subexpression_fixup(false, false, Precedence::Let);
let scan = scan_right(
&e.expr,
right_fixup,
Precedence::Let,
1,
if fixup.next_operator < Precedence::Let {
0
} else {
1
},
);
match scan {
Scan::Fail | Scan::Bailout if fixup.next_operator < Precedence::Let => {
return Scan::Bailout;
}
Scan::Consume => return Scan::Consume,
_ => {}
}
if right_fixup.rightmost_subexpression_precedence(&e.expr) < Precedence::Let {
Scan::Consume
} else if let Scan::Fail = scan {
Scan::Bailout
} else {
Scan::Consume
}
}
Expr::Group(e) => scan_right(&e.expr, fixup, precedence, fail_offset, bailout_offset),
Expr::Array(_)
| Expr::Assign(_)
| Expr::Async(_)
| Expr::Await(_)
| Expr::Binary(_)
| Expr::Block(_)
| Expr::Call(_)
| Expr::Cast(_)
| Expr::Const(_)
| Expr::Continue(_)
| Expr::Field(_)
| Expr::ForLoop(_)
| Expr::If(_)
| Expr::Index(_)
| Expr::Infer(_)
| Expr::Lit(_)
| Expr::Loop(_)
| Expr::Macro(_)
| Expr::Match(_)
| Expr::MethodCall(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::Range(_)
| Expr::Repeat(_)
| Expr::Struct(_)
| Expr::Try(_)
| Expr::TryBlock(_)
| Expr::Tuple(_)
| Expr::Unsafe(_)
| Expr::Verbatim(_)
| Expr::While(_) => match fixup.next_operator {
Precedence::Assign | Precedence::Range if precedence == Precedence::Range => Scan::Fail,
_ if precedence == Precedence::Let && fixup.next_operator < Precedence::Let => {
Scan::Fail
}
_ => consume_by_precedence,
},
_ => match fixup.next_operator {
Precedence::Assign | Precedence::Range if precedence == Precedence::Range => Scan::Fail,
_ if precedence == Precedence::Let && fixup.next_operator < Precedence::Let => {
Scan::Fail
}
_ => consume_by_precedence,
},
}
}

426
vendor/prettyplease/src/generics.rs vendored Normal file

@@ -0,0 +1,426 @@
use crate::algorithm::Printer;
use crate::iter::IterDelimited;
use crate::path::PathKind;
use crate::INDENT;
use proc_macro2::TokenStream;
use std::ptr;
use syn::{
BoundLifetimes, CapturedParam, ConstParam, Expr, GenericParam, Generics, LifetimeParam,
PreciseCapture, PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam,
TypeParamBound, WhereClause, WherePredicate,
};
impl Printer {
pub fn generics(&mut self, generics: &Generics) {
if generics.params.is_empty() {
return;
}
self.word("<");
self.cbox(0);
self.zerobreak();
// Print lifetimes before types and consts, regardless of their
// order in self.params.
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
First,
Second,
}
fn group(param: &GenericParam) -> Group {
match param {
GenericParam::Lifetime(_) => Group::First,
GenericParam::Type(_) | GenericParam::Const(_) => Group::Second,
}
}
let last = generics.params.iter().max_by_key(|param| group(param));
for current_group in [Group::First, Group::Second] {
for param in &generics.params {
if group(param) == current_group {
self.generic_param(param);
self.trailing_comma(ptr::eq(param, last.unwrap()));
}
}
}
self.offset(-INDENT);
self.end();
self.word(">");
}
fn generic_param(&mut self, generic_param: &GenericParam) {
match generic_param {
GenericParam::Type(type_param) => self.type_param(type_param),
GenericParam::Lifetime(lifetime_param) => self.lifetime_param(lifetime_param),
GenericParam::Const(const_param) => self.const_param(const_param),
}
}
pub fn bound_lifetimes(&mut self, bound_lifetimes: &BoundLifetimes) {
self.word("for<");
for param in bound_lifetimes.lifetimes.iter().delimited() {
self.generic_param(&param);
if !param.is_last {
self.word(", ");
}
}
self.word("> ");
}
fn lifetime_param(&mut self, lifetime_param: &LifetimeParam) {
self.outer_attrs(&lifetime_param.attrs);
self.lifetime(&lifetime_param.lifetime);
for lifetime in lifetime_param.bounds.iter().delimited() {
if lifetime.is_first {
self.word(": ");
} else {
self.word(" + ");
}
self.lifetime(&lifetime);
}
}
fn type_param(&mut self, type_param: &TypeParam) {
self.outer_attrs(&type_param.attrs);
self.ident(&type_param.ident);
self.ibox(INDENT);
for type_param_bound in type_param.bounds.iter().delimited() {
if type_param_bound.is_first {
self.word(": ");
} else {
self.space();
self.word("+ ");
}
self.type_param_bound(&type_param_bound);
}
if let Some(default) = &type_param.default {
self.space();
self.word("= ");
self.ty(default);
}
self.end();
}
pub fn type_param_bound(&mut self, type_param_bound: &TypeParamBound) {
match type_param_bound {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
TypeParamBound::Trait(trait_bound) => {
self.trait_bound(trait_bound, TraitBoundConst::None);
}
TypeParamBound::Lifetime(lifetime) => self.lifetime(lifetime),
TypeParamBound::PreciseCapture(precise_capture) => {
self.precise_capture(precise_capture);
}
TypeParamBound::Verbatim(bound) => self.type_param_bound_verbatim(bound),
_ => unimplemented!("unknown TypeParamBound"),
}
}
fn trait_bound(&mut self, trait_bound: &TraitBound, constness: TraitBoundConst) {
if trait_bound.paren_token.is_some() {
self.word("(");
}
if let Some(bound_lifetimes) = &trait_bound.lifetimes {
self.bound_lifetimes(bound_lifetimes);
}
match constness {
TraitBoundConst::None => {}
#[cfg(feature = "verbatim")]
TraitBoundConst::Conditional => self.word("[const] "),
#[cfg(feature = "verbatim")]
TraitBoundConst::Unconditional => self.word("const "),
}
self.trait_bound_modifier(&trait_bound.modifier);
for segment in trait_bound.path.segments.iter().delimited() {
if !segment.is_first || trait_bound.path.leading_colon.is_some() {
self.word("::");
}
self.path_segment(&segment, PathKind::Type);
}
if trait_bound.paren_token.is_some() {
self.word(")");
}
}
fn trait_bound_modifier(&mut self, trait_bound_modifier: &TraitBoundModifier) {
match trait_bound_modifier {
TraitBoundModifier::None => {}
TraitBoundModifier::Maybe(_question_mark) => self.word("?"),
}
}
#[cfg(not(feature = "verbatim"))]
fn type_param_bound_verbatim(&mut self, bound: &TokenStream) {
unimplemented!("TypeParamBound::Verbatim `{}`", bound);
}
#[cfg(feature = "verbatim")]
fn type_param_bound_verbatim(&mut self, tokens: &TokenStream) {
use syn::parse::{Parse, ParseStream, Result};
use syn::{
bracketed, parenthesized, token, ParenthesizedGenericArguments, Path, PathArguments,
Token,
};
enum TypeParamBoundVerbatim {
Ellipsis,
Const(TraitBound, TraitBoundConst),
}
impl Parse for TypeParamBoundVerbatim {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(Token![...]) {
input.parse::<Token![...]>()?;
return Ok(TypeParamBoundVerbatim::Ellipsis);
}
let content;
let content = if input.peek(token::Paren) {
parenthesized!(content in input);
&content
} else {
input
};
let lifetimes: Option<BoundLifetimes> = content.parse()?;
let constness = if content.peek(token::Bracket) {
let conditionally_const;
bracketed!(conditionally_const in content);
conditionally_const.parse::<Token![const]>()?;
TraitBoundConst::Conditional
} else if content.peek(Token![const]) {
content.parse::<Token![const]>()?;
TraitBoundConst::Unconditional
} else {
TraitBoundConst::None
};
let modifier: TraitBoundModifier = content.parse()?;
let mut path: Path = content.parse()?;
if path.segments.last().unwrap().arguments.is_empty()
&& (content.peek(token::Paren)
|| content.peek(Token![::]) && content.peek3(token::Paren))
{
content.parse::<Option<Token![::]>>()?;
let args: ParenthesizedGenericArguments = content.parse()?;
let parenthesized = PathArguments::Parenthesized(args);
path.segments.last_mut().unwrap().arguments = parenthesized;
}
Ok(TypeParamBoundVerbatim::Const(
TraitBound {
paren_token: None,
modifier,
lifetimes,
path,
},
constness,
))
}
}
let bound: TypeParamBoundVerbatim = match syn::parse2(tokens.clone()) {
Ok(bound) => bound,
Err(_) => unimplemented!("TypeParamBound::Verbatim `{}`", tokens),
};
match bound {
TypeParamBoundVerbatim::Ellipsis => {
self.word("...");
}
TypeParamBoundVerbatim::Const(trait_bound, constness) => {
self.trait_bound(&trait_bound, constness);
}
}
}
fn const_param(&mut self, const_param: &ConstParam) {
self.outer_attrs(&const_param.attrs);
self.word("const ");
self.ident(&const_param.ident);
self.word(": ");
self.ty(&const_param.ty);
if let Some(default) = &const_param.default {
self.word(" = ");
self.const_argument(default);
}
}
pub fn where_clause_for_body(&mut self, where_clause: &Option<WhereClause>) {
let hardbreaks = true;
let semi = false;
self.where_clause_impl(where_clause, hardbreaks, semi);
}
pub fn where_clause_semi(&mut self, where_clause: &Option<WhereClause>) {
let hardbreaks = true;
let semi = true;
self.where_clause_impl(where_clause, hardbreaks, semi);
}
pub fn where_clause_oneline(&mut self, where_clause: &Option<WhereClause>) {
let hardbreaks = false;
let semi = false;
self.where_clause_impl(where_clause, hardbreaks, semi);
}
pub fn where_clause_oneline_semi(&mut self, where_clause: &Option<WhereClause>) {
let hardbreaks = false;
let semi = true;
self.where_clause_impl(where_clause, hardbreaks, semi);
}
fn where_clause_impl(
&mut self,
where_clause: &Option<WhereClause>,
hardbreaks: bool,
semi: bool,
) {
let where_clause = match where_clause {
Some(where_clause) if !where_clause.predicates.is_empty() => where_clause,
_ => {
if semi {
self.word(";");
} else {
self.nbsp();
}
return;
}
};
if hardbreaks {
self.hardbreak();
self.offset(-INDENT);
self.word("where");
self.hardbreak();
for predicate in where_clause.predicates.iter().delimited() {
self.where_predicate(&predicate);
if predicate.is_last && semi {
self.word(";");
} else {
self.word(",");
self.hardbreak();
}
}
if !semi {
self.offset(-INDENT);
}
} else {
self.space();
self.offset(-INDENT);
self.word("where");
self.space();
for predicate in where_clause.predicates.iter().delimited() {
self.where_predicate(&predicate);
if predicate.is_last && semi {
self.word(";");
} else {
self.trailing_comma_or_space(predicate.is_last);
}
}
if !semi {
self.offset(-INDENT);
}
}
}
fn where_predicate(&mut self, predicate: &WherePredicate) {
match predicate {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
WherePredicate::Type(predicate) => self.predicate_type(predicate),
WherePredicate::Lifetime(predicate) => self.predicate_lifetime(predicate),
_ => unimplemented!("unknown WherePredicate"),
}
}
fn predicate_type(&mut self, predicate: &PredicateType) {
if let Some(bound_lifetimes) = &predicate.lifetimes {
self.bound_lifetimes(bound_lifetimes);
}
self.ty(&predicate.bounded_ty);
self.word(":");
if predicate.bounds.len() == 1 {
self.ibox(0);
} else {
self.ibox(INDENT);
}
for type_param_bound in predicate.bounds.iter().delimited() {
if type_param_bound.is_first {
self.nbsp();
} else {
self.space();
self.word("+ ");
}
self.type_param_bound(&type_param_bound);
}
self.end();
}
fn predicate_lifetime(&mut self, predicate: &PredicateLifetime) {
self.lifetime(&predicate.lifetime);
self.word(":");
self.ibox(INDENT);
for lifetime in predicate.bounds.iter().delimited() {
if lifetime.is_first {
self.nbsp();
} else {
self.space();
self.word("+ ");
}
self.lifetime(&lifetime);
}
self.end();
}
fn precise_capture(&mut self, precise_capture: &PreciseCapture) {
self.word("use<");
for capture in precise_capture.params.iter().delimited() {
self.captured_param(&capture);
if !capture.is_last {
self.word(", ");
}
}
self.word(">");
}
fn captured_param(&mut self, capture: &CapturedParam) {
match capture {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
CapturedParam::Lifetime(lifetime) => self.lifetime(lifetime),
CapturedParam::Ident(ident) => self.ident(ident),
_ => unimplemented!("unknown CapturedParam"),
}
}
pub fn const_argument(&mut self, expr: &Expr) {
match expr {
#![cfg_attr(all(test, exhaustive), allow(non_exhaustive_omitted_patterns))]
Expr::Lit(expr) => self.expr_lit(expr),
Expr::Path(expr)
if expr.attrs.is_empty()
&& expr.qself.is_none()
&& expr.path.get_ident().is_some() =>
{
self.expr_path(expr);
}
Expr::Block(expr) => self.expr_block(expr),
_ => {
self.cbox(INDENT);
self.expr_as_small_block(expr, 0);
self.end();
}
}
}
}
enum TraitBoundConst {
None,
#[cfg(feature = "verbatim")]
Conditional,
#[cfg(feature = "verbatim")]
Unconditional,
}

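One behavior of the generics printer above worth calling out: parameters are bucketed so lifetimes always print before type and const parameters, regardless of their order in the parsed tree. A small hand-written example of that reordering:

// If generated code built a syn tree with the order <T, 'a, const N: usize>,
// the printer above still renders the lifetime first:
struct Wrapper<'a, T, const N: usize>(&'a [T; N]);
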
1813
vendor/prettyplease/src/item.rs vendored Normal file

File diff suppressed because it is too large.

46
vendor/prettyplease/src/iter.rs vendored Normal file

@@ -0,0 +1,46 @@
use std::iter::Peekable;
use std::ops::Deref;
pub struct Delimited<I: Iterator> {
is_first: bool,
iter: Peekable<I>,
}
pub trait IterDelimited: Iterator + Sized {
fn delimited(self) -> Delimited<Self> {
Delimited {
is_first: true,
iter: self.peekable(),
}
}
}
impl<I: Iterator> IterDelimited for I {}
pub struct IteratorItem<T> {
value: T,
pub is_first: bool,
pub is_last: bool,
}
impl<I: Iterator> Iterator for Delimited<I> {
type Item = IteratorItem<I::Item>;
fn next(&mut self) -> Option<Self::Item> {
let item = IteratorItem {
value: self.iter.next()?,
is_first: self.is_first,
is_last: self.iter.peek().is_none(),
};
self.is_first = false;
Some(item)
}
}
impl<T> Deref for IteratorItem<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}

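IterDelimited is the small helper the printers above use to know whether an element is first or last without collecting into a Vec. A sketch of how it reads at a call site (hypothetical, since the trait is private to the crate and must be in scope):

// Each IteratorItem carries is_first/is_last flags and derefs to the value.
fn comma_separated(names: &[&str]) -> String {
    let mut out = String::new();
    for item in names.iter().copied().delimited() {
        if !item.is_first {
            out.push_str(", ");
        }
        out.push_str(*item); // Deref on IteratorItem yields the wrapped &str
    }
    out
}
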
385
vendor/prettyplease/src/lib.rs vendored Normal file

@@ -0,0 +1,385 @@
//! [![github]](https://github.com/dtolnay/prettyplease)&ensp;[![crates-io]](https://crates.io/crates/prettyplease)&ensp;[![docs-rs]](https://docs.rs/prettyplease)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
//!
//! <br>
//!
//! **prettyplease::unparse** &mdash; a minimal `syn` syntax tree pretty-printer
//!
//! <br>
//!
//! # Overview
//!
//! This is a pretty-printer to turn a `syn` syntax tree into a `String` of
//! well-formatted source code. In contrast to rustfmt, this library is intended
//! to be suitable for arbitrary generated code.
//!
//! Rustfmt prioritizes high-quality output that is impeccable enough that you'd
//! be comfortable spending your career staring at its output &mdash; but that
//! means some heavyweight algorithms, and it has a tendency to bail out on code
//! that is hard to format (for example [rustfmt#3697], and there are dozens
//! more issues like it). That's not necessarily a big deal for human-generated
//! code because when code gets highly nested, the human will naturally be
//! inclined to refactor into more easily formattable code. But for generated
//! code, having the formatter just give up leaves it totally unreadable.
//!
//! [rustfmt#3697]: https://github.com/rust-lang/rustfmt/issues/3697
//!
//! This library is designed using the simplest possible algorithm and data
//! structures that can deliver about 95% of the quality of rustfmt-formatted
//! output. In my experience testing real-world code, approximately 97-98% of
//! output lines come out identical between rustfmt's formatting and this
//! crate's. The rest have slightly different linebreak decisions, but still
//! clearly follow the dominant modern Rust style.
//!
//! The tradeoffs made by this crate are a good fit for generated code that you
//! will *not* spend your career staring at. For example, the output of
//! `bindgen`, or the output of `cargo-expand`. In those cases it's more
//! important that the whole thing be formattable without the formatter giving
//! up, than that it be flawless.
//!
//! <br>
//!
//! # Feature matrix
//!
//! Here are a few superficial comparisons of this crate against the AST
//! pretty-printer built into rustc, and rustfmt. The sections below go into
//! more detail comparing the output of each of these libraries.
//!
//! | | prettyplease | rustc | rustfmt |
//! |:---|:---:|:---:|:---:|
//! | non-pathological behavior on big or generated code | 💚 | ❌ | ❌ |
//! | idiomatic modern formatting ("locally indistinguishable from rustfmt") | 💚 | ❌ | 💚 |
//! | throughput | 60 MB/s | 39 MB/s | 2.8 MB/s |
//! | number of dependencies | 3 | 72 | 66 |
//! | compile time including dependencies | 2.4 sec | 23.1 sec | 29.8 sec |
//! | buildable using a stable Rust compiler | 💚 | ❌ | ❌ |
//! | published to crates.io | 💚 | ❌ | ❌ |
//! | extensively configurable output | ❌ | ❌ | 💚 |
//! | intended to accommodate hand-maintained source code | ❌ | ❌ | 💚 |
//!
//! <br>
//!
//! # Comparison to rustfmt
//!
//! - [input.rs](https://github.com/dtolnay/prettyplease/blob/0.1.0/examples/input.rs)
//! - [output.prettyplease.rs](https://github.com/dtolnay/prettyplease/blob/0.1.0/examples/output.prettyplease.rs)
//! - [output.rustfmt.rs](https://github.com/dtolnay/prettyplease/blob/0.1.0/examples/output.rustfmt.rs)
//!
//! If you weren't told which output file is which, it would be practically
//! impossible to tell &mdash; **except** for line 435 in the rustfmt output,
//! which is more than 1000 characters long because rustfmt just gave up
//! formatting that part of the file:
//!
//! ```
//! # const _: &str = stringify! {{{
//! match segments[5] {
//! 0 => write!(f, "::{}", ipv4),
//! 0xffff => write!(f, "::ffff:{}", ipv4),
//! _ => unreachable!(),
//! }
//! } else { # [derive (Copy , Clone , Default)] struct Span { start : usize , len : usize , } let zeroes = { let mut longest = Span :: default () ; let mut current = Span :: default () ; for (i , & segment) in segments . iter () . enumerate () { if segment == 0 { if current . len == 0 { current . start = i ; } current . len += 1 ; if current . len > longest . len { longest = current ; } } else { current = Span :: default () ; } } longest } ; # [doc = " Write a colon-separated part of the address"] # [inline] fn fmt_subslice (f : & mut fmt :: Formatter < '_ > , chunk : & [u16]) -> fmt :: Result { if let Some ((first , tail)) = chunk . split_first () { write ! (f , "{:x}" , first) ? ; for segment in tail { f . write_char (':') ? ; write ! (f , "{:x}" , segment) ? ; } } Ok (()) } if zeroes . len > 1 { fmt_subslice (f , & segments [.. zeroes . start]) ? ; f . write_str ("::") ? ; fmt_subslice (f , & segments [zeroes . start + zeroes . len ..]) } else { fmt_subslice (f , & segments) } }
//! } else {
//! const IPV6_BUF_LEN: usize = (4 * 8) + 7;
//! let mut buf = [0u8; IPV6_BUF_LEN];
//! let mut buf_slice = &mut buf[..];
//! # }};
//! ```
//!
//! This is a pretty typical manifestation of rustfmt bailing out in generated
//! code &mdash; a chunk of the input ends up on one line. The other
//! manifestation is that you're working on some code, running rustfmt on save
//! like a conscientious developer, but after a while notice it isn't doing
//! anything. You introduce an intentional formatting issue, like a stray indent
//! or semicolon, and run rustfmt to check your suspicion. Nope, it doesn't get
//! cleaned up &mdash; rustfmt is just not formatting the part of the file you
//! are working on.
//!
//! The prettyplease library is designed to have no pathological cases that
//! force a bail out; the entire input you give it will get formatted in some
//! "good enough" form.
//!
//! Separately, rustfmt can be problematic to integrate into projects. It's
//! written using rustc's internal syntax tree, so it can't be built by a stable
//! compiler. Its releases are not regularly published to crates.io, so in Cargo
//! builds you'd need to depend on it as a git dependency, which precludes
//! publishing your crate to crates.io also. You can shell out to a `rustfmt`
//! binary, but that'll be whatever rustfmt version is installed on each
//! developer's system (if any), which can lead to spurious diffs in checked-in
//! generated code formatted by different versions. In contrast prettyplease is
//! designed to be easy to pull in as a library, and compiles fast.
//!
//! <br>
//!
//! # Comparison to rustc_ast_pretty
//!
//! - [input.rs](https://github.com/dtolnay/prettyplease/blob/0.1.0/examples/input.rs)
//! - [output.prettyplease.rs](https://github.com/dtolnay/prettyplease/blob/0.1.0/examples/output.prettyplease.rs)
//! - [output.rustc.rs](https://github.com/dtolnay/prettyplease/blob/0.1.0/examples/output.rustc.rs)
//!
//! This is the pretty-printer that gets used when rustc prints source code,
//! such as `rustc -Zunpretty=expanded`. It's used also by the standard
//! library's `stringify!` when stringifying an interpolated macro_rules AST
//! fragment, like an `$:expr`, and transitively by `dbg!` and many macros in the
//! ecosystem.
//!
//! Rustc's formatting is mostly okay, but does not hew closely to the dominant
//! contemporary style of Rust formatting. Some things wouldn't ever be written
//! on one line, like this `match` expression, and certainly not with a comma in
//! front of the closing brace:
//!
//! ```
//! # const _: &str = stringify! {
//! fn eq(&self, other: &IpAddr) -> bool {
//! match other { IpAddr::V4(v4) => self == v4, IpAddr::V6(_) => false, }
//! }
//! # };
//! ```
//!
//! Some places use non-multiple-of-4 indentation, which is definitely not the
//! norm:
//!
//! ```
//! # const _: &str = stringify! {
//! pub const fn to_ipv6_mapped(&self) -> Ipv6Addr {
//! let [a, b, c, d] = self.octets();
//! Ipv6Addr{inner:
//! c::in6_addr{s6_addr:
//! [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xFF,
//! 0xFF, a, b, c, d],},}
//! }
//! # };
//! ```
//!
//! And although there isn't an egregious example of it in the link because the
//! input code is pretty tame, in general rustc_ast_pretty has pathological
//! behavior on generated code. It has a tendency to use excessive horizontal
//! indentation and rapidly run out of width:
//!
//! ```
//! # const _: &str = stringify! {
//! ::std::io::_print(::core::fmt::Arguments::new_v1(&[""],
//! &match (&msg,) {
//! _args =>
//! [::core::fmt::ArgumentV1::new(_args.0,
//! ::core::fmt::Display::fmt)],
//! }));
//! # };
//! ```
//!
//! The snippets above are clearly different from modern rustfmt style. In
//! contrast, prettyplease is designed to have output that is practically
//! indistinguishable from rustfmt-formatted code.
//!
//! <br>
//!
//! # Example
//!
//! ```
//! // [dependencies]
//! // prettyplease = "0.2"
//! // syn = { version = "2", default-features = false, features = ["full", "parsing"] }
//!
//! const INPUT: &str = stringify! {
//! use crate::{
//! lazy::{Lazy, SyncLazy, SyncOnceCell}, panic,
//! sync::{ atomic::{AtomicUsize, Ordering::SeqCst},
//! mpsc::channel, Mutex, },
//! thread,
//! };
//! impl<T, U> Into<U> for T where U: From<T> {
//! fn into(self) -> U { U::from(self) }
//! }
//! };
//!
//! fn main() {
//! let syntax_tree = syn::parse_file(INPUT).unwrap();
//! let formatted = prettyplease::unparse(&syntax_tree);
//! print!("{}", formatted);
//! }
//! ```
//!
//! <br>
//!
//! # Algorithm notes
//!
//! The approach and terminology used in the implementation are derived from
//! [*Derek C. Oppen, "Pretty Printing" (1979)*][paper], on which
//! rustc_ast_pretty is also based, and from rustc_ast_pretty's implementation
//! written by Graydon Hoare in 2011 (and modernized over the years by dozens of
//! volunteer maintainers).
//!
//! [paper]: http://i.stanford.edu/pub/cstr/reports/cs/tr/79/770/CS-TR-79-770.pdf
//!
//! The paper describes two language-agnostic interacting procedures `Scan()`
//! and `Print()`. Language-specific code decomposes an input data structure
//! into a stream of `string` and `break` tokens, and `begin` and `end` tokens
//! for grouping. Each `begin`&ndash;`end` range may be identified as either
//! "consistent breaking" or "inconsistent breaking". If a group is consistently
//! breaking, then if the whole contents do not fit on the line, *every* `break`
//! token in the group will receive a linebreak. This is appropriate, for
//! example, for Rust struct literals, or arguments of a function call. If a
//! group is inconsistently breaking, then the `string` tokens in the group are
//! greedily placed on the line until out of space, and linebroken only at those
//! `break` tokens for which the next string would not fit. For example, this is
//! appropriate for the contents of a braced `use` statement in Rust.
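//!
//! As an illustrative sketch (assumed output, not a transcript from this
//! crate's test suite): when the same items no longer fit on one line, a
//! consistently breaking struct literal puts every field on its own line,
//! while an inconsistently breaking `use` tree packs as many names on each
//! line as fit.
//!
//! ```
//! # const _: &str = stringify! {
//! let _ = Struct {
//!     first_field: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
//!     second_field: yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
//! };
//! use crate::{
//!     aaaaaaaaaaaaaaaa, bbbbbbbbbbbbbbbb, cccccccccccccccc, dddddddddddddddd,
//!     eeeeeeeeeeeeeeee,
//! };
//! # };
//! ```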
//!
//! Scan's job is to efficiently accumulate sizing information about groups and
//! breaks. For every `begin` token we compute the distance to the matched `end`
//! token, and for every `break` we compute the distance to the next `break`.
//! The algorithm uses a ringbuffer to hold tokens whose size is not yet
//! ascertained. The maximum size of the ringbuffer is bounded by the target
//! line length and does not grow indefinitely, regardless of deep nesting in
//! the input stream. That's because once a group is sufficiently big, the
//! precise size can no longer make a difference to linebreak decisions and we
//! can effectively treat it as "infinity".
//!
//! Print's job is to use the sizing information to efficiently assign a
//! "broken" or "not broken" status to every `begin` token. At that point the
//! output is easily constructed by concatenating `string` tokens and breaking
//! at `break` tokens contained within a broken group.
//!
//! Leveraging these primitives (i.e. cleverly placing the all-or-nothing
//! consistent breaks and greedy inconsistent breaks) to yield
//! rustfmt-compatible formatting for all of Rust's syntax tree nodes is a fun
//! challenge.
//!
//! Here is a visualization of some Rust tokens fed into the pretty printing
//! algorithm. Consistently breaking `begin`&mdash;`end` pairs are represented
//! by `«`&#8288;`»`, inconsistently breaking by `‹`&#8288;`›`, `break` by `·`,
//! and the rest of the non-whitespace are `string`.
//!
//! ```text
//! use crate::«{·
//! lazy::«{·Lazy,· SyncLazy,· SyncOnceCell·}»,·
//! panic,·
//! sync::«{·
//! atomic::«{·AtomicUsize,· Ordering::SeqCst·}»,·
//! mpsc::channel,· Mutex
//! }»,·
//! thread
//! }»;·
//! ««impl<«·T,· U·»>» Into<«·U·»>· for T·
//! where·
//! U: From<«·T·»>
//! {·
//! « fn into(·«·self·») -> U {·
//! U::from(«·self·»)›·
//! » }·
//! »}·
//! ```
//!
//! The algorithm described in the paper is not quite sufficient for producing
//! well-formatted Rust code that is locally indistinguishable from rustfmt's
//! style. The reason is that in the paper, the complete non-whitespace contents
//! are assumed to be independent of linebreak decisions, with Scan and Print
//! being only in control of the whitespace (spaces and line breaks). In Rust as
//! idiomatically formatted by rustfmt, that is not the case. Trailing commas
//! are one example; the punctuation is only known *after* the broken vs
//! non-broken status of the surrounding group is known:
//!
//! ```
//! # struct Struct { x: u64, y: bool }
//! # let xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = 0;
//! # let yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy = true;
//! #
//! let _ = Struct { x: 0, y: true };
//!
//! let _ = Struct {
//! x: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
//! y: yyyyyyyyyyyyyyyyyyyyyyyyyyyyyy, //<- trailing comma if the expression wrapped
//! };
//! ```
//!
//! The formatting of `match` expressions is another case; we want small arms on
//! the same line as the pattern, and big arms wrapped in a brace. The presence
//! of the brace punctuation, comma, and semicolon are all dependent on whether
//! the arm fits on the line:
//!
//! ```
//! # struct Entry { nanos: u32 }
//! # let total_nanos = 0u64;
//! # let mut total_secs = 0u64;
//! # let tmp;
//! # let entry = Entry { nanos: 0 };
//! # const NANOS_PER_SEC: u32 = 1_000_000_000;
//! #
//! match total_nanos.checked_add(entry.nanos as u64) {
//! Some(n) => tmp = n, //<- small arm, inline with comma
//! None => {
//! total_secs = total_secs
//! .checked_add(total_nanos / NANOS_PER_SEC as u64)
//! .expect("overflow in iter::sum over durations");
//! } //<- big arm, needs brace added, and also semicolon^
//! }
//! ```
//!
//! The printing algorithm implementation in this crate accommodates all of
//! these situations with conditional punctuation tokens whose selection can be
//! deferred and populated after it's known that the group is or is not broken.
#![doc(html_root_url = "https://docs.rs/prettyplease/0.2.37")]
#![allow(
clippy::bool_to_int_with_if,
clippy::cast_possible_wrap,
clippy::cast_sign_loss,
clippy::derive_partial_eq_without_eq,
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::items_after_statements,
clippy::let_underscore_untyped,
clippy::match_like_matches_macro,
clippy::match_same_arms,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::needless_pass_by_value,
clippy::ref_option,
clippy::similar_names,
clippy::struct_excessive_bools,
clippy::too_many_lines,
clippy::unused_self,
clippy::vec_init_then_push
)]
#![cfg_attr(all(test, exhaustive), feature(non_exhaustive_omitted_patterns_lint))]
mod algorithm;
mod attr;
mod classify;
mod convenience;
mod data;
mod expr;
mod file;
mod fixup;
mod generics;
mod item;
mod iter;
mod lifetime;
mod lit;
mod mac;
mod pat;
mod path;
mod precedence;
mod ring;
mod stmt;
mod token;
mod ty;
use crate::algorithm::Printer;
use syn::File;
// Target line width.
const MARGIN: isize = 89;
// Number of spaces increment at each level of block indentation.
const INDENT: isize = 4;
// Every line is allowed at least this much space, even if highly indented.
const MIN_SPACE: isize = 60;
pub fn unparse(file: &File) -> String {
let mut p = Printer::new();
p.file(file);
p.eof()
}

9
vendor/prettyplease/src/lifetime.rs vendored Normal file
View File

@@ -0,0 +1,9 @@
use crate::algorithm::Printer;
use syn::Lifetime;
impl Printer {
pub fn lifetime(&mut self, lifetime: &Lifetime) {
self.word("'");
self.ident(&lifetime.ident);
}
}

57
vendor/prettyplease/src/lit.rs vendored Normal file
View File

@@ -0,0 +1,57 @@
use crate::algorithm::Printer;
use proc_macro2::Literal;
use syn::{Lit, LitBool, LitByte, LitByteStr, LitCStr, LitChar, LitFloat, LitInt, LitStr};
impl Printer {
pub fn lit(&mut self, lit: &Lit) {
match lit {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Lit::Str(lit) => self.lit_str(lit),
Lit::ByteStr(lit) => self.lit_byte_str(lit),
Lit::CStr(lit) => self.lit_c_str(lit),
Lit::Byte(lit) => self.lit_byte(lit),
Lit::Char(lit) => self.lit_char(lit),
Lit::Int(lit) => self.lit_int(lit),
Lit::Float(lit) => self.lit_float(lit),
Lit::Bool(lit) => self.lit_bool(lit),
Lit::Verbatim(lit) => self.lit_verbatim(lit),
_ => unimplemented!("unknown Lit"),
}
}
pub fn lit_str(&mut self, lit: &LitStr) {
self.word(lit.token().to_string());
}
fn lit_byte_str(&mut self, lit: &LitByteStr) {
self.word(lit.token().to_string());
}
fn lit_c_str(&mut self, lit: &LitCStr) {
self.word(lit.token().to_string());
}
fn lit_byte(&mut self, lit: &LitByte) {
self.word(lit.token().to_string());
}
fn lit_char(&mut self, lit: &LitChar) {
self.word(lit.token().to_string());
}
fn lit_int(&mut self, lit: &LitInt) {
self.word(lit.token().to_string());
}
fn lit_float(&mut self, lit: &LitFloat) {
self.word(lit.token().to_string());
}
fn lit_bool(&mut self, lit: &LitBool) {
self.word(if lit.value { "true" } else { "false" });
}
fn lit_verbatim(&mut self, token: &Literal) {
self.word(token.to_string());
}
}

706
vendor/prettyplease/src/mac.rs vendored Normal file
View File

@@ -0,0 +1,706 @@
use crate::algorithm::Printer;
use crate::path::PathKind;
use crate::token::Token;
use crate::INDENT;
use proc_macro2::{Delimiter, Spacing, TokenStream};
use syn::{Ident, Macro, MacroDelimiter};
impl Printer {
pub fn mac(&mut self, mac: &Macro, ident: Option<&Ident>, semicolon: bool) {
if mac.path.is_ident("macro_rules") {
if let Some(ident) = ident {
self.macro_rules(ident, &mac.tokens);
return;
}
}
#[cfg(feature = "verbatim")]
if ident.is_none() && self.standard_library_macro(mac, semicolon) {
return;
}
self.path(&mac.path, PathKind::Simple);
self.word("!");
if let Some(ident) = ident {
self.nbsp();
self.ident(ident);
}
let (open, close, delimiter_break) = match mac.delimiter {
MacroDelimiter::Paren(_) => ("(", ")", Self::zerobreak as fn(&mut Self)),
MacroDelimiter::Brace(_) => (" {", "}", Self::hardbreak as fn(&mut Self)),
MacroDelimiter::Bracket(_) => ("[", "]", Self::zerobreak as fn(&mut Self)),
};
self.word(open);
if !mac.tokens.is_empty() {
self.cbox(INDENT);
delimiter_break(self);
self.ibox(0);
self.macro_rules_tokens(mac.tokens.clone(), false);
self.end();
delimiter_break(self);
self.offset(-INDENT);
self.end();
}
self.word(close);
if semicolon {
self.word(";");
}
}
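    // Prints a whole `macro_rules!` definition. Each rule is recognized by a
    // small state machine: a matcher group, then `=` and `>`, then an
    // expander group, then a terminating `;`.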
fn macro_rules(&mut self, name: &Ident, rules: &TokenStream) {
enum State {
Start,
Matcher,
Equal,
Greater,
Expander,
}
use State::*;
self.word("macro_rules! ");
self.ident(name);
self.word(" {");
self.cbox(INDENT);
self.hardbreak_if_nonempty();
let mut state = State::Start;
for tt in rules.clone() {
let token = Token::from(tt);
match (state, token) {
(Start, Token::Group(delimiter, stream)) => {
self.delimiter_open(delimiter);
if !stream.is_empty() {
self.cbox(INDENT);
self.zerobreak();
self.ibox(0);
self.macro_rules_tokens(stream, true);
self.end();
self.zerobreak();
self.offset(-INDENT);
self.end();
}
self.delimiter_close(delimiter);
state = Matcher;
}
(Matcher, Token::Punct('=', Spacing::Joint)) => {
self.word(" =");
state = Equal;
}
(Equal, Token::Punct('>', Spacing::Alone)) => {
self.word(">");
state = Greater;
}
(Greater, Token::Group(_delimiter, stream)) => {
self.word(" {");
self.neverbreak();
if !stream.is_empty() {
self.cbox(INDENT);
self.hardbreak();
self.ibox(0);
self.macro_rules_tokens(stream, false);
self.end();
self.hardbreak();
self.offset(-INDENT);
self.end();
}
self.word("}");
state = Expander;
}
(Expander, Token::Punct(';', Spacing::Alone)) => {
self.word(";");
self.hardbreak();
state = Start;
}
_ => unimplemented!("bad macro_rules syntax"),
}
}
match state {
Start => {}
Expander => {
self.word(";");
self.hardbreak();
}
_ => self.hardbreak(),
}
self.offset(-INDENT);
self.end();
self.word("}");
}
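    // Prints the token stream inside a macro invocation or a macro_rules
    // matcher/expander. A state machine remembers what kind of token was just
    // printed so that spaces are inserted only where rustfmt-style output
    // would have them, e.g. no space after `$`, around `::`, or before `,`
    // and `;`.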
pub fn macro_rules_tokens(&mut self, stream: TokenStream, matcher: bool) {
#[derive(PartialEq)]
enum State {
Start,
Dollar,
DollarCrate,
DollarIdent,
DollarIdentColon,
DollarParen,
DollarParenSep,
Pound,
PoundBang,
Dot,
Colon,
Colon2,
Ident,
IdentBang,
Delim,
Other,
}
use State::*;
let mut state = Start;
let mut previous_is_joint = true;
for tt in stream {
let token = Token::from(tt);
let (needs_space, next_state) = match (&state, &token) {
(Dollar, Token::Ident(_)) if matcher => (false, DollarIdent),
(Dollar, Token::Ident(ident)) if ident == "crate" => (false, DollarCrate),
(Dollar, Token::Ident(_)) => (false, Other),
(DollarIdent, Token::Punct(':', Spacing::Alone)) => (false, DollarIdentColon),
(DollarIdentColon, Token::Ident(_)) => (false, Other),
(DollarParen, Token::Punct('+' | '*' | '?', Spacing::Alone)) => (false, Other),
(DollarParen, Token::Ident(_) | Token::Literal(_)) => (false, DollarParenSep),
(DollarParen, Token::Punct(_, Spacing::Joint)) => (false, DollarParen),
(DollarParen, Token::Punct(_, Spacing::Alone)) => (false, DollarParenSep),
(DollarParenSep, Token::Punct('+' | '*', _)) => (false, Other),
(Pound, Token::Punct('!', _)) => (false, PoundBang),
(Dollar, Token::Group(Delimiter::Parenthesis, _)) => (false, DollarParen),
(Pound | PoundBang, Token::Group(Delimiter::Bracket, _)) => (false, Other),
(Ident, Token::Group(Delimiter::Parenthesis | Delimiter::Bracket, _)) => {
(false, Delim)
}
(Ident, Token::Punct('!', Spacing::Alone)) => (false, IdentBang),
(IdentBang, Token::Group(Delimiter::Parenthesis | Delimiter::Bracket, _)) => {
(false, Other)
}
(Colon, Token::Punct(':', _)) => (false, Colon2),
(_, Token::Group(Delimiter::Parenthesis | Delimiter::Bracket, _)) => (true, Delim),
(_, Token::Group(Delimiter::Brace | Delimiter::None, _)) => (true, Other),
(_, Token::Ident(ident)) if !is_keyword(ident) => {
(state != Dot && state != Colon2, Ident)
}
(_, Token::Literal(lit)) if lit.to_string().ends_with('.') => (state != Dot, Other),
(_, Token::Literal(_)) => (state != Dot, Ident),
(_, Token::Punct(',' | ';', _)) => (false, Other),
(_, Token::Punct('.', _)) if !matcher => (state != Ident && state != Delim, Dot),
(_, Token::Punct(':', Spacing::Joint)) => {
(state != Ident && state != DollarCrate, Colon)
}
(_, Token::Punct('$', _)) => (true, Dollar),
(_, Token::Punct('#', _)) => (true, Pound),
(_, _) => (true, Other),
};
if !previous_is_joint {
if needs_space {
self.space();
} else if let Token::Punct('.', _) = token {
self.zerobreak();
}
}
previous_is_joint = match token {
Token::Punct(_, Spacing::Joint) | Token::Punct('$', _) => true,
_ => false,
};
self.single_token(
token,
if matcher {
|printer, stream| printer.macro_rules_tokens(stream, true)
} else {
|printer, stream| printer.macro_rules_tokens(stream, false)
},
);
state = next_state;
}
}
}
pub(crate) fn requires_semi(delimiter: &MacroDelimiter) -> bool {
match delimiter {
MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => true,
MacroDelimiter::Brace(_) => false,
}
}
fn is_keyword(ident: &Ident) -> bool {
match ident.to_string().as_str() {
"as" | "async" | "await" | "box" | "break" | "const" | "continue" | "crate" | "dyn"
| "else" | "enum" | "extern" | "fn" | "for" | "if" | "impl" | "in" | "let" | "loop"
| "macro" | "match" | "mod" | "move" | "mut" | "pub" | "ref" | "return" | "static"
| "struct" | "trait" | "type" | "unsafe" | "use" | "where" | "while" | "yield" => true,
_ => false,
}
}
#[cfg(feature = "verbatim")]
mod standard_library {
use crate::algorithm::Printer;
use crate::expr;
use crate::fixup::FixupContext;
use crate::iter::IterDelimited;
use crate::path::PathKind;
use crate::INDENT;
use syn::ext::IdentExt;
use syn::parse::{Parse, ParseStream, Parser, Result};
use syn::punctuated::Punctuated;
use syn::{
parenthesized, token, Attribute, Expr, ExprAssign, ExprPath, Ident, Lit, Macro, Pat, Path,
Token, Type, Visibility,
};
enum KnownMacro {
Expr(Expr),
Exprs(Vec<Expr>),
Cfg(Cfg),
Matches(Matches),
ThreadLocal(Vec<ThreadLocal>),
VecArray(Punctuated<Expr, Token![,]>),
VecRepeat { elem: Expr, n: Expr },
}
enum Cfg {
Eq(Ident, Option<Lit>),
Call(Ident, Vec<Cfg>),
}
struct Matches {
expression: Expr,
pattern: Pat,
guard: Option<Expr>,
}
struct ThreadLocal {
attrs: Vec<Attribute>,
vis: Visibility,
name: Ident,
ty: Type,
init: Expr,
}
struct FormatArgs {
format_string: Expr,
args: Vec<Expr>,
}
impl Parse for FormatArgs {
fn parse(input: ParseStream) -> Result<Self> {
let format_string: Expr = input.parse()?;
let mut args = Vec::new();
while !input.is_empty() {
input.parse::<Token![,]>()?;
if input.is_empty() {
break;
}
let arg = if input.peek(Ident::peek_any)
&& input.peek2(Token![=])
&& !input.peek2(Token![==])
{
let key = input.call(Ident::parse_any)?;
let eq_token: Token![=] = input.parse()?;
let value: Expr = input.parse()?;
Expr::Assign(ExprAssign {
attrs: Vec::new(),
left: Box::new(Expr::Path(ExprPath {
attrs: Vec::new(),
qself: None,
path: Path::from(key),
})),
eq_token,
right: Box::new(value),
})
} else {
input.parse()?
};
args.push(arg);
}
Ok(FormatArgs {
format_string,
args,
})
}
}
impl KnownMacro {
fn parse_expr(input: ParseStream) -> Result<Self> {
let expr: Expr = input.parse()?;
Ok(KnownMacro::Expr(expr))
}
fn parse_expr_comma(input: ParseStream) -> Result<Self> {
let expr: Expr = input.parse()?;
input.parse::<Option<Token![,]>>()?;
Ok(KnownMacro::Exprs(vec![expr]))
}
fn parse_exprs(input: ParseStream) -> Result<Self> {
let exprs = input.parse_terminated(Expr::parse, Token![,])?;
Ok(KnownMacro::Exprs(Vec::from_iter(exprs)))
}
fn parse_assert(input: ParseStream) -> Result<Self> {
let mut exprs = Vec::new();
let cond: Expr = input.parse()?;
exprs.push(cond);
if input.parse::<Option<Token![,]>>()?.is_some() && !input.is_empty() {
let format_args: FormatArgs = input.parse()?;
exprs.push(format_args.format_string);
exprs.extend(format_args.args);
}
Ok(KnownMacro::Exprs(exprs))
}
fn parse_assert_cmp(input: ParseStream) -> Result<Self> {
let mut exprs = Vec::new();
let left: Expr = input.parse()?;
exprs.push(left);
input.parse::<Token![,]>()?;
let right: Expr = input.parse()?;
exprs.push(right);
if input.parse::<Option<Token![,]>>()?.is_some() && !input.is_empty() {
let format_args: FormatArgs = input.parse()?;
exprs.push(format_args.format_string);
exprs.extend(format_args.args);
}
Ok(KnownMacro::Exprs(exprs))
}
fn parse_cfg(input: ParseStream) -> Result<Self> {
fn parse_single(input: ParseStream) -> Result<Cfg> {
let ident: Ident = input.parse()?;
if input.peek(token::Paren) && (ident == "all" || ident == "any") {
let content;
parenthesized!(content in input);
let list = content.call(parse_multiple)?;
Ok(Cfg::Call(ident, list))
} else if input.peek(token::Paren) && ident == "not" {
let content;
parenthesized!(content in input);
let cfg = content.call(parse_single)?;
content.parse::<Option<Token![,]>>()?;
Ok(Cfg::Call(ident, vec![cfg]))
} else if input.peek(Token![=]) {
input.parse::<Token![=]>()?;
let string: Lit = input.parse()?;
Ok(Cfg::Eq(ident, Some(string)))
} else {
Ok(Cfg::Eq(ident, None))
}
}
fn parse_multiple(input: ParseStream) -> Result<Vec<Cfg>> {
let mut vec = Vec::new();
while !input.is_empty() {
let cfg = input.call(parse_single)?;
vec.push(cfg);
if input.is_empty() {
break;
}
input.parse::<Token![,]>()?;
}
Ok(vec)
}
let cfg = input.call(parse_single)?;
input.parse::<Option<Token![,]>>()?;
Ok(KnownMacro::Cfg(cfg))
}
fn parse_env(input: ParseStream) -> Result<Self> {
let mut exprs = Vec::new();
let name: Expr = input.parse()?;
exprs.push(name);
if input.parse::<Option<Token![,]>>()?.is_some() && !input.is_empty() {
let error_msg: Expr = input.parse()?;
exprs.push(error_msg);
input.parse::<Option<Token![,]>>()?;
}
Ok(KnownMacro::Exprs(exprs))
}
fn parse_format_args(input: ParseStream) -> Result<Self> {
let format_args: FormatArgs = input.parse()?;
let mut exprs = format_args.args;
exprs.insert(0, format_args.format_string);
Ok(KnownMacro::Exprs(exprs))
}
fn parse_matches(input: ParseStream) -> Result<Self> {
let expression: Expr = input.parse()?;
input.parse::<Token![,]>()?;
let pattern = input.call(Pat::parse_multi_with_leading_vert)?;
let guard = if input.parse::<Option<Token![if]>>()?.is_some() {
Some(input.parse()?)
} else {
None
};
input.parse::<Option<Token![,]>>()?;
Ok(KnownMacro::Matches(Matches {
expression,
pattern,
guard,
}))
}
fn parse_thread_local(input: ParseStream) -> Result<Self> {
let mut items = Vec::new();
while !input.is_empty() {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
input.parse::<Token![static]>()?;
let name: Ident = input.parse()?;
input.parse::<Token![:]>()?;
let ty: Type = input.parse()?;
input.parse::<Token![=]>()?;
let init: Expr = input.parse()?;
if input.is_empty() {
break;
}
input.parse::<Token![;]>()?;
items.push(ThreadLocal {
attrs,
vis,
name,
ty,
init,
});
}
Ok(KnownMacro::ThreadLocal(items))
}
fn parse_vec(input: ParseStream) -> Result<Self> {
if input.is_empty() {
return Ok(KnownMacro::VecArray(Punctuated::new()));
}
let first: Expr = input.parse()?;
if input.parse::<Option<Token![;]>>()?.is_some() {
let len: Expr = input.parse()?;
Ok(KnownMacro::VecRepeat {
elem: first,
n: len,
})
} else {
let mut vec = Punctuated::new();
vec.push_value(first);
while !input.is_empty() {
let comma: Token![,] = input.parse()?;
vec.push_punct(comma);
if input.is_empty() {
break;
}
let next: Expr = input.parse()?;
vec.push_value(next);
}
Ok(KnownMacro::VecArray(vec))
}
}
fn parse_write(input: ParseStream) -> Result<Self> {
let mut exprs = Vec::new();
let dst: Expr = input.parse()?;
exprs.push(dst);
input.parse::<Token![,]>()?;
let format_args: FormatArgs = input.parse()?;
exprs.push(format_args.format_string);
exprs.extend(format_args.args);
Ok(KnownMacro::Exprs(exprs))
}
fn parse_writeln(input: ParseStream) -> Result<Self> {
let mut exprs = Vec::new();
let dst: Expr = input.parse()?;
exprs.push(dst);
if input.parse::<Option<Token![,]>>()?.is_some() && !input.is_empty() {
let format_args: FormatArgs = input.parse()?;
exprs.push(format_args.format_string);
exprs.extend(format_args.args);
}
Ok(KnownMacro::Exprs(exprs))
}
}
impl Printer {
pub fn standard_library_macro(&mut self, mac: &Macro, mut semicolon: bool) -> bool {
let name = mac.path.segments.last().unwrap().ident.to_string();
let parser = match name.as_str() {
"addr_of" | "addr_of_mut" => KnownMacro::parse_expr,
"assert" | "debug_assert" => KnownMacro::parse_assert,
"assert_eq" | "assert_ne" | "debug_assert_eq" | "debug_assert_ne" => {
KnownMacro::parse_assert_cmp
}
"cfg" => KnownMacro::parse_cfg,
"compile_error" | "include" | "include_bytes" | "include_str" | "option_env" => {
KnownMacro::parse_expr_comma
}
"concat" | "concat_bytes" | "dbg" => KnownMacro::parse_exprs,
"const_format_args" | "eprint" | "eprintln" | "format" | "format_args"
| "format_args_nl" | "panic" | "print" | "println" | "todo" | "unimplemented"
| "unreachable" => KnownMacro::parse_format_args,
"env" => KnownMacro::parse_env,
"matches" => KnownMacro::parse_matches,
"thread_local" => KnownMacro::parse_thread_local,
"vec" => KnownMacro::parse_vec,
"write" => KnownMacro::parse_write,
"writeln" => KnownMacro::parse_writeln,
_ => return false,
};
let known_macro = match parser.parse2(mac.tokens.clone()) {
Ok(known_macro) => known_macro,
Err(_) => return false,
};
self.path(&mac.path, PathKind::Simple);
self.word("!");
match &known_macro {
KnownMacro::Expr(expr) => {
self.word("(");
self.cbox(INDENT);
self.zerobreak();
self.expr(expr, FixupContext::NONE);
self.zerobreak();
self.offset(-INDENT);
self.end();
self.word(")");
}
KnownMacro::Exprs(exprs) => {
self.word("(");
self.cbox(INDENT);
self.zerobreak();
for elem in exprs.iter().delimited() {
self.expr(&elem, FixupContext::NONE);
self.trailing_comma(elem.is_last);
}
self.offset(-INDENT);
self.end();
self.word(")");
}
KnownMacro::Cfg(cfg) => {
self.word("(");
self.cfg(cfg);
self.word(")");
}
KnownMacro::Matches(matches) => {
self.word("(");
self.cbox(INDENT);
self.zerobreak();
self.expr(&matches.expression, FixupContext::NONE);
self.word(",");
self.space();
self.pat(&matches.pattern);
if let Some(guard) = &matches.guard {
self.space();
self.word("if ");
self.expr(guard, FixupContext::NONE);
}
self.zerobreak();
self.offset(-INDENT);
self.end();
self.word(")");
}
KnownMacro::ThreadLocal(items) => {
self.word(" {");
self.cbox(INDENT);
self.hardbreak_if_nonempty();
for item in items {
self.outer_attrs(&item.attrs);
self.cbox(0);
self.visibility(&item.vis);
self.word("static ");
self.ident(&item.name);
self.word(": ");
self.ty(&item.ty);
self.word(" = ");
self.neverbreak();
self.expr(&item.init, FixupContext::NONE);
self.word(";");
self.end();
self.hardbreak();
}
self.offset(-INDENT);
self.end();
self.word("}");
semicolon = false;
}
KnownMacro::VecArray(vec) => {
if vec.is_empty() {
self.word("[]");
} else if expr::simple_array(vec) {
self.cbox(INDENT);
self.word("[");
self.zerobreak();
self.ibox(0);
for elem in vec.iter().delimited() {
self.expr(&elem, FixupContext::NONE);
if !elem.is_last {
self.word(",");
self.space();
}
}
self.end();
self.trailing_comma(true);
self.offset(-INDENT);
self.word("]");
self.end();
} else {
self.word("[");
self.cbox(INDENT);
self.zerobreak();
for elem in vec.iter().delimited() {
self.expr(&elem, FixupContext::NONE);
self.trailing_comma(elem.is_last);
}
self.offset(-INDENT);
self.end();
self.word("]");
}
}
KnownMacro::VecRepeat { elem, n } => {
self.word("[");
self.cbox(INDENT);
self.zerobreak();
self.expr(elem, FixupContext::NONE);
self.word(";");
self.space();
self.expr(n, FixupContext::NONE);
self.zerobreak();
self.offset(-INDENT);
self.end();
self.word("]");
}
}
if semicolon {
self.word(";");
}
true
}
fn cfg(&mut self, cfg: &Cfg) {
match cfg {
Cfg::Eq(ident, value) => {
self.ident(ident);
if let Some(value) = value {
self.word(" = ");
self.lit(value);
}
}
Cfg::Call(ident, args) => {
self.ident(ident);
self.word("(");
self.cbox(INDENT);
self.zerobreak();
for arg in args.iter().delimited() {
self.cfg(&arg);
self.trailing_comma(arg.is_last);
}
self.offset(-INDENT);
self.end();
self.word(")");
}
}
}
}
}

254
vendor/prettyplease/src/pat.rs vendored Normal file
View File

@@ -0,0 +1,254 @@
use crate::algorithm::Printer;
use crate::fixup::FixupContext;
use crate::iter::IterDelimited;
use crate::path::PathKind;
use crate::INDENT;
use proc_macro2::TokenStream;
use syn::{
FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct, PatTuple,
PatTupleStruct, PatType, PatWild,
};
impl Printer {
pub fn pat(&mut self, pat: &Pat) {
match pat {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Pat::Const(pat) => self.expr_const(pat),
Pat::Ident(pat) => self.pat_ident(pat),
Pat::Lit(pat) => self.expr_lit(pat),
Pat::Macro(pat) => self.expr_macro(pat),
Pat::Or(pat) => self.pat_or(pat),
Pat::Paren(pat) => self.pat_paren(pat),
Pat::Path(pat) => self.expr_path(pat),
Pat::Range(pat) => self.expr_range(pat, FixupContext::NONE),
Pat::Reference(pat) => self.pat_reference(pat),
Pat::Rest(pat) => self.pat_rest(pat),
Pat::Slice(pat) => self.pat_slice(pat),
Pat::Struct(pat) => self.pat_struct(pat),
Pat::Tuple(pat) => self.pat_tuple(pat),
Pat::TupleStruct(pat) => self.pat_tuple_struct(pat),
Pat::Type(pat) => self.pat_type(pat),
Pat::Verbatim(pat) => self.pat_verbatim(pat),
Pat::Wild(pat) => self.pat_wild(pat),
_ => unimplemented!("unknown Pat"),
}
}
fn pat_ident(&mut self, pat: &PatIdent) {
self.outer_attrs(&pat.attrs);
if pat.by_ref.is_some() {
self.word("ref ");
}
if pat.mutability.is_some() {
self.word("mut ");
}
self.ident(&pat.ident);
if let Some((_at_token, subpat)) = &pat.subpat {
self.word(" @ ");
self.pat(subpat);
}
}
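    // Or-patterns made up only of literals and wildcards are packed
    // inconsistently (as many cases per line as fit); if any case is more
    // complex, the group breaks consistently so every `|` alternative gets
    // its own line once wrapping is needed.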
fn pat_or(&mut self, pat: &PatOr) {
self.outer_attrs(&pat.attrs);
let mut consistent_break = false;
for case in &pat.cases {
match case {
Pat::Lit(_) | Pat::Wild(_) => {}
_ => {
consistent_break = true;
break;
}
}
}
if consistent_break {
self.cbox(0);
} else {
self.ibox(0);
}
for case in pat.cases.iter().delimited() {
if !case.is_first {
self.space();
self.word("| ");
}
self.pat(&case);
}
self.end();
}
fn pat_paren(&mut self, pat: &PatParen) {
self.outer_attrs(&pat.attrs);
self.word("(");
self.pat(&pat.pat);
self.word(")");
}
fn pat_reference(&mut self, pat: &PatReference) {
self.outer_attrs(&pat.attrs);
self.word("&");
if pat.mutability.is_some() {
self.word("mut ");
}
self.pat(&pat.pat);
}
fn pat_rest(&mut self, pat: &PatRest) {
self.outer_attrs(&pat.attrs);
self.word("..");
}
fn pat_slice(&mut self, pat: &PatSlice) {
self.outer_attrs(&pat.attrs);
self.word("[");
for elem in pat.elems.iter().delimited() {
self.pat(&elem);
self.trailing_comma(elem.is_last);
}
self.word("]");
}
fn pat_struct(&mut self, pat: &PatStruct) {
self.outer_attrs(&pat.attrs);
self.cbox(INDENT);
self.path(&pat.path, PathKind::Expr);
self.word(" {");
self.space_if_nonempty();
for field in pat.fields.iter().delimited() {
self.field_pat(&field);
self.trailing_comma_or_space(field.is_last && pat.rest.is_none());
}
if let Some(rest) = &pat.rest {
self.pat_rest(rest);
self.space();
}
self.offset(-INDENT);
self.end();
self.word("}");
}
fn pat_tuple(&mut self, pat: &PatTuple) {
self.outer_attrs(&pat.attrs);
self.word("(");
self.cbox(INDENT);
self.zerobreak();
for elem in pat.elems.iter().delimited() {
self.pat(&elem);
if pat.elems.len() == 1 {
if pat.elems.trailing_punct() {
self.word(",");
}
self.zerobreak();
} else {
self.trailing_comma(elem.is_last);
}
}
self.offset(-INDENT);
self.end();
self.word(")");
}
fn pat_tuple_struct(&mut self, pat: &PatTupleStruct) {
self.outer_attrs(&pat.attrs);
self.path(&pat.path, PathKind::Expr);
self.word("(");
self.cbox(INDENT);
self.zerobreak();
for elem in pat.elems.iter().delimited() {
self.pat(&elem);
self.trailing_comma(elem.is_last);
}
self.offset(-INDENT);
self.end();
self.word(")");
}
pub fn pat_type(&mut self, pat: &PatType) {
self.outer_attrs(&pat.attrs);
self.pat(&pat.pat);
self.word(": ");
self.ty(&pat.ty);
}
#[cfg(not(feature = "verbatim"))]
fn pat_verbatim(&mut self, pat: &TokenStream) {
unimplemented!("Pat::Verbatim `{}`", pat);
}
#[cfg(feature = "verbatim")]
fn pat_verbatim(&mut self, tokens: &TokenStream) {
use syn::parse::{Parse, ParseStream, Result};
use syn::{braced, Attribute, Block, Token};
enum PatVerbatim {
Ellipsis,
Box(Pat),
Const(PatConst),
}
struct PatConst {
attrs: Vec<Attribute>,
block: Block,
}
impl Parse for PatVerbatim {
fn parse(input: ParseStream) -> Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(Token![box]) {
input.parse::<Token![box]>()?;
let inner = Pat::parse_single(input)?;
Ok(PatVerbatim::Box(inner))
} else if lookahead.peek(Token![const]) {
input.parse::<Token![const]>()?;
let content;
let brace_token = braced!(content in input);
let attrs = content.call(Attribute::parse_inner)?;
let stmts = content.call(Block::parse_within)?;
Ok(PatVerbatim::Const(PatConst {
attrs,
block: Block { brace_token, stmts },
}))
} else if lookahead.peek(Token![...]) {
input.parse::<Token![...]>()?;
Ok(PatVerbatim::Ellipsis)
} else {
Err(lookahead.error())
}
}
}
let pat: PatVerbatim = match syn::parse2(tokens.clone()) {
Ok(pat) => pat,
Err(_) => unimplemented!("Pat::Verbatim `{}`", tokens),
};
match pat {
PatVerbatim::Ellipsis => {
self.word("...");
}
PatVerbatim::Box(pat) => {
self.word("box ");
self.pat(&pat);
}
PatVerbatim::Const(pat) => {
self.word("const ");
self.cbox(INDENT);
self.small_block(&pat.block, &pat.attrs);
self.end();
}
}
}
fn pat_wild(&mut self, pat: &PatWild) {
self.outer_attrs(&pat.attrs);
self.word("_");
}
fn field_pat(&mut self, field_pat: &FieldPat) {
self.outer_attrs(&field_pat.attrs);
if field_pat.colon_token.is_some() {
self.member(&field_pat.member);
self.word(": ");
}
self.pat(&field_pat.pat);
}
}

194
vendor/prettyplease/src/path.rs vendored Normal file
View File

@@ -0,0 +1,194 @@
use crate::algorithm::Printer;
use crate::iter::IterDelimited;
use crate::INDENT;
use std::ptr;
use syn::{
AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument,
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
};
#[derive(Copy, Clone, PartialEq)]
pub enum PathKind {
// a::B
Simple,
// a::B<T>
Type,
// a::B::<T>
Expr,
}
impl Printer {
pub fn path(&mut self, path: &Path, kind: PathKind) {
assert!(!path.segments.is_empty());
for segment in path.segments.iter().delimited() {
if !segment.is_first || path.leading_colon.is_some() {
self.word("::");
}
self.path_segment(&segment, kind);
}
}
pub fn path_segment(&mut self, segment: &PathSegment, kind: PathKind) {
self.ident(&segment.ident);
self.path_arguments(&segment.arguments, kind);
}
fn path_arguments(&mut self, arguments: &PathArguments, kind: PathKind) {
match arguments {
PathArguments::None => {}
PathArguments::AngleBracketed(arguments) => {
self.angle_bracketed_generic_arguments(arguments, kind);
}
PathArguments::Parenthesized(arguments) => {
self.parenthesized_generic_arguments(arguments);
}
}
}
fn generic_argument(&mut self, arg: &GenericArgument) {
match arg {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
GenericArgument::Lifetime(lifetime) => self.lifetime(lifetime),
GenericArgument::Type(ty) => self.ty(ty),
GenericArgument::Const(expr) => self.const_argument(expr),
GenericArgument::AssocType(assoc) => self.assoc_type(assoc),
GenericArgument::AssocConst(assoc) => self.assoc_const(assoc),
GenericArgument::Constraint(constraint) => self.constraint(constraint),
_ => unimplemented!("unknown GenericArgument"),
}
}
pub fn angle_bracketed_generic_arguments(
&mut self,
generic: &AngleBracketedGenericArguments,
path_kind: PathKind,
) {
if generic.args.is_empty() || path_kind == PathKind::Simple {
return;
}
if path_kind == PathKind::Expr {
self.word("::");
}
self.word("<");
self.cbox(INDENT);
self.zerobreak();
// Print lifetimes before types/consts/bindings, regardless of their
// order in self.args.
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
First,
Second,
}
fn group(arg: &GenericArgument) -> Group {
match arg {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
GenericArgument::Lifetime(_) => Group::First,
GenericArgument::Type(_)
| GenericArgument::Const(_)
| GenericArgument::AssocType(_)
| GenericArgument::AssocConst(_)
| GenericArgument::Constraint(_) => Group::Second,
_ => Group::Second,
}
}
let last = generic.args.iter().max_by_key(|param| group(param));
for current_group in [Group::First, Group::Second] {
for arg in &generic.args {
if group(arg) == current_group {
self.generic_argument(arg);
self.trailing_comma(ptr::eq(arg, last.unwrap()));
}
}
}
self.offset(-INDENT);
self.end();
self.word(">");
}
fn assoc_type(&mut self, assoc: &AssocType) {
self.ident(&assoc.ident);
if let Some(generics) = &assoc.generics {
self.angle_bracketed_generic_arguments(generics, PathKind::Type);
}
self.word(" = ");
self.ty(&assoc.ty);
}
fn assoc_const(&mut self, assoc: &AssocConst) {
self.ident(&assoc.ident);
if let Some(generics) = &assoc.generics {
self.angle_bracketed_generic_arguments(generics, PathKind::Type);
}
self.word(" = ");
self.const_argument(&assoc.value);
}
fn constraint(&mut self, constraint: &Constraint) {
self.ident(&constraint.ident);
if let Some(generics) = &constraint.generics {
self.angle_bracketed_generic_arguments(generics, PathKind::Type);
}
self.ibox(INDENT);
for bound in constraint.bounds.iter().delimited() {
if bound.is_first {
self.word(": ");
} else {
self.space();
self.word("+ ");
}
self.type_param_bound(&bound);
}
self.end();
}
fn parenthesized_generic_arguments(&mut self, arguments: &ParenthesizedGenericArguments) {
self.cbox(INDENT);
self.word("(");
self.zerobreak();
for ty in arguments.inputs.iter().delimited() {
self.ty(&ty);
self.trailing_comma(ty.is_last);
}
self.offset(-INDENT);
self.word(")");
self.return_type(&arguments.output);
self.end();
}
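    // Prints a possibly qualified path. For a `QSelf` such as
    // `<Vec<T> as IntoIterator>::Item`, the first `qself.position` segments
    // are printed inside the angle brackets after `as`, and the remaining
    // segments follow outside them.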
pub fn qpath(&mut self, qself: &Option<QSelf>, path: &Path, kind: PathKind) {
let qself = if let Some(qself) = qself {
qself
} else {
self.path(path, kind);
return;
};
assert!(qself.position < path.segments.len());
self.word("<");
self.ty(&qself.ty);
let mut segments = path.segments.iter();
if qself.position > 0 {
self.word(" as ");
for segment in segments.by_ref().take(qself.position).delimited() {
if !segment.is_first || path.leading_colon.is_some() {
self.word("::");
}
self.path_segment(&segment, PathKind::Type);
if segment.is_last {
self.word(">");
}
}
} else {
self.word(">");
}
for segment in segments {
self.word("::");
self.path_segment(segment, kind);
}
}
}

148
vendor/prettyplease/src/precedence.rs vendored Normal file
View File

@@ -0,0 +1,148 @@
use syn::{
AttrStyle, Attribute, BinOp, Expr, ExprArray, ExprAsync, ExprAwait, ExprBlock, ExprBreak,
ExprCall, ExprConst, ExprContinue, ExprField, ExprForLoop, ExprIf, ExprIndex, ExprInfer,
ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprParen, ExprPath, ExprRepeat,
ExprReturn, ExprStruct, ExprTry, ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile, ExprYield,
ReturnType,
};
// Reference: https://doc.rust-lang.org/reference/expressions.html#expression-precedence
#[derive(Copy, Clone, PartialEq, PartialOrd)]
pub enum Precedence {
// return, break, closures
Jump,
// = += -= *= /= %= &= |= ^= <<= >>=
Assign,
// .. ..=
Range,
// ||
Or,
// &&
And,
// let
Let,
// == != < > <= >=
Compare,
// |
BitOr,
// ^
BitXor,
// &
BitAnd,
// << >>
Shift,
// + -
Sum,
// * / %
Product,
// as
Cast,
// unary - * ! & &mut
Prefix,
// paths, loops, function calls, array indexing, field expressions, method calls
Unambiguous,
}
impl Precedence {
pub(crate) const MIN: Self = Precedence::Jump;
pub(crate) fn of_binop(op: &BinOp) -> Self {
match op {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
BinOp::Add(_) | BinOp::Sub(_) => Precedence::Sum,
BinOp::Mul(_) | BinOp::Div(_) | BinOp::Rem(_) => Precedence::Product,
BinOp::And(_) => Precedence::And,
BinOp::Or(_) => Precedence::Or,
BinOp::BitXor(_) => Precedence::BitXor,
BinOp::BitAnd(_) => Precedence::BitAnd,
BinOp::BitOr(_) => Precedence::BitOr,
BinOp::Shl(_) | BinOp::Shr(_) => Precedence::Shift,
BinOp::Eq(_)
| BinOp::Lt(_)
| BinOp::Le(_)
| BinOp::Ne(_)
| BinOp::Ge(_)
| BinOp::Gt(_) => Precedence::Compare,
BinOp::AddAssign(_)
| BinOp::SubAssign(_)
| BinOp::MulAssign(_)
| BinOp::DivAssign(_)
| BinOp::RemAssign(_)
| BinOp::BitXorAssign(_)
| BinOp::BitAndAssign(_)
| BinOp::BitOrAssign(_)
| BinOp::ShlAssign(_)
| BinOp::ShrAssign(_) => Precedence::Assign,
_ => Precedence::MIN,
}
}
pub(crate) fn of(e: &Expr) -> Self {
fn prefix_attrs(attrs: &[Attribute]) -> Precedence {
for attr in attrs {
if let AttrStyle::Outer = attr.style {
return Precedence::Prefix;
}
}
Precedence::Unambiguous
}
match e {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::Closure(e) => match e.output {
ReturnType::Default => Precedence::Jump,
ReturnType::Type(..) => prefix_attrs(&e.attrs),
},
Expr::Break(ExprBreak { expr, .. })
| Expr::Return(ExprReturn { expr, .. })
| Expr::Yield(ExprYield { expr, .. }) => match expr {
Some(_) => Precedence::Jump,
None => Precedence::Unambiguous,
},
Expr::Assign(_) => Precedence::Assign,
Expr::Range(_) => Precedence::Range,
Expr::Binary(e) => Precedence::of_binop(&e.op),
Expr::Let(_) => Precedence::Let,
Expr::Cast(_) => Precedence::Cast,
Expr::RawAddr(_) | Expr::Reference(_) | Expr::Unary(_) => Precedence::Prefix,
Expr::Array(ExprArray { attrs, .. })
| Expr::Async(ExprAsync { attrs, .. })
| Expr::Await(ExprAwait { attrs, .. })
| Expr::Block(ExprBlock { attrs, .. })
| Expr::Call(ExprCall { attrs, .. })
| Expr::Const(ExprConst { attrs, .. })
| Expr::Continue(ExprContinue { attrs, .. })
| Expr::Field(ExprField { attrs, .. })
| Expr::ForLoop(ExprForLoop { attrs, .. })
| Expr::If(ExprIf { attrs, .. })
| Expr::Index(ExprIndex { attrs, .. })
| Expr::Infer(ExprInfer { attrs, .. })
| Expr::Lit(ExprLit { attrs, .. })
| Expr::Loop(ExprLoop { attrs, .. })
| Expr::Macro(ExprMacro { attrs, .. })
| Expr::Match(ExprMatch { attrs, .. })
| Expr::MethodCall(ExprMethodCall { attrs, .. })
| Expr::Paren(ExprParen { attrs, .. })
| Expr::Path(ExprPath { attrs, .. })
| Expr::Repeat(ExprRepeat { attrs, .. })
| Expr::Struct(ExprStruct { attrs, .. })
| Expr::Try(ExprTry { attrs, .. })
| Expr::TryBlock(ExprTryBlock { attrs, .. })
| Expr::Tuple(ExprTuple { attrs, .. })
| Expr::Unsafe(ExprUnsafe { attrs, .. })
| Expr::While(ExprWhile { attrs, .. }) => prefix_attrs(attrs),
Expr::Group(e) => Precedence::of(&e.expr),
Expr::Verbatim(_) => Precedence::Unambiguous,
_ => Precedence::Unambiguous,
}
}
}
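// Illustrative sketch (not part of the upstream crate): the derived
// `PartialOrd` follows declaration order, so variants later in the enum bind
// more tightly.
#[cfg(test)]
mod precedence_sketch {
    use super::Precedence;

    #[test]
    fn declaration_order_matches_binding_strength() {
        assert!(Precedence::Jump < Precedence::Assign);
        assert!(Precedence::Sum < Precedence::Product);
        assert!(Precedence::Cast < Precedence::Prefix);
    }
}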

81
vendor/prettyplease/src/ring.rs vendored Normal file
View File

@@ -0,0 +1,81 @@
use std::collections::VecDeque;
use std::ops::{Index, IndexMut, Range};
pub struct RingBuffer<T> {
data: VecDeque<T>,
// Abstract index of data[0] in infinitely sized queue
offset: usize,
}
impl<T> RingBuffer<T> {
pub fn new() -> Self {
RingBuffer {
data: VecDeque::new(),
offset: 0,
}
}
pub fn is_empty(&self) -> bool {
self.data.is_empty()
}
pub fn len(&self) -> usize {
self.data.len()
}
pub fn push(&mut self, value: T) -> usize {
let index = self.offset + self.data.len();
self.data.push_back(value);
index
}
pub fn clear(&mut self) {
self.data.clear();
}
pub fn index_range(&self) -> Range<usize> {
self.offset..self.offset + self.data.len()
}
pub fn first(&self) -> &T {
&self.data[0]
}
pub fn first_mut(&mut self) -> &mut T {
&mut self.data[0]
}
pub fn pop_first(&mut self) -> T {
self.offset += 1;
self.data.pop_front().unwrap()
}
pub fn last(&self) -> &T {
self.data.back().unwrap()
}
pub fn last_mut(&mut self) -> &mut T {
self.data.back_mut().unwrap()
}
pub fn second_last(&self) -> &T {
&self.data[self.data.len() - 2]
}
pub fn pop_last(&mut self) {
self.data.pop_back().unwrap();
}
}
impl<T> Index<usize> for RingBuffer<T> {
type Output = T;
fn index(&self, index: usize) -> &Self::Output {
&self.data[index.checked_sub(self.offset).unwrap()]
}
}
impl<T> IndexMut<usize> for RingBuffer<T> {
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
&mut self.data[index.checked_sub(self.offset).unwrap()]
}
}
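// Illustrative sketch (not part of the upstream crate): indices handed out by
// `push` stay valid after `pop_first`, because indexing is relative to the
// abstract offset rather than to the current front of the VecDeque.
#[cfg(test)]
mod ring_sketch {
    use super::RingBuffer;

    #[test]
    fn indices_survive_pop_first() {
        let mut buf = RingBuffer::new();
        let a = buf.push('a');
        let b = buf.push('b');
        assert_eq!(buf[a], 'a');
        assert_eq!(buf.pop_first(), 'a');
        // 'b' keeps its original abstract index even though it is now first.
        assert_eq!(buf[b], 'b');
        assert_eq!(buf.index_range(), 1..2);
    }
}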

221
vendor/prettyplease/src/stmt.rs vendored Normal file
View File

@@ -0,0 +1,221 @@
use crate::algorithm::Printer;
use crate::classify;
use crate::expr;
use crate::fixup::FixupContext;
use crate::mac;
use crate::INDENT;
use syn::{BinOp, Expr, Stmt};
impl Printer {
pub fn stmt(&mut self, stmt: &Stmt, is_last: bool) {
match stmt {
Stmt::Local(local) => {
self.outer_attrs(&local.attrs);
self.ibox(0);
self.word("let ");
self.pat(&local.pat);
if let Some(local_init) = &local.init {
self.word(" = ");
self.neverbreak();
self.subexpr(
&local_init.expr,
local_init.diverge.is_some()
&& classify::expr_trailing_brace(&local_init.expr),
FixupContext::NONE,
);
if let Some((_else, diverge)) = &local_init.diverge {
self.space();
self.word("else ");
self.end();
self.neverbreak();
self.cbox(INDENT);
if let Some(expr) = expr::simple_block(diverge) {
self.small_block(&expr.block, &[]);
} else {
self.expr_as_small_block(diverge, INDENT);
}
}
}
self.end();
self.word(";");
self.hardbreak();
}
Stmt::Item(item) => self.item(item),
Stmt::Expr(expr, None) => {
if break_after(expr) {
self.ibox(0);
self.expr_beginning_of_line(expr, false, true, FixupContext::new_stmt());
if add_semi(expr) {
self.word(";");
}
self.end();
self.hardbreak();
} else {
self.expr_beginning_of_line(expr, false, true, FixupContext::new_stmt());
}
}
Stmt::Expr(expr, Some(_semi)) => {
if let Expr::Verbatim(tokens) = expr {
if tokens.is_empty() {
return;
}
}
self.ibox(0);
self.expr_beginning_of_line(expr, false, true, FixupContext::new_stmt());
if !remove_semi(expr) {
self.word(";");
}
self.end();
self.hardbreak();
}
Stmt::Macro(stmt) => {
self.outer_attrs(&stmt.attrs);
let semicolon = stmt.semi_token.is_some()
|| !is_last && mac::requires_semi(&stmt.mac.delimiter);
self.mac(&stmt.mac, None, semicolon);
self.hardbreak();
}
}
}
}
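// `add_semi` reports whether a trailing expression (a `Stmt::Expr` with no
// semicolon in the syntax tree) should still be printed with one: assignments,
// compound assignments, `break`, `continue`, `return`, and `yield` are
// statements executed for their effect, so a `;` is added.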
pub fn add_semi(expr: &Expr) -> bool {
match expr {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::Assign(_) | Expr::Break(_) | Expr::Continue(_) | Expr::Return(_) | Expr::Yield(_) => {
true
}
Expr::Binary(expr) =>
{
match expr.op {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
BinOp::AddAssign(_)
| BinOp::SubAssign(_)
| BinOp::MulAssign(_)
| BinOp::DivAssign(_)
| BinOp::RemAssign(_)
| BinOp::BitXorAssign(_)
| BinOp::BitAndAssign(_)
| BinOp::BitOrAssign(_)
| BinOp::ShlAssign(_)
| BinOp::ShrAssign(_) => true,
BinOp::Add(_)
| BinOp::Sub(_)
| BinOp::Mul(_)
| BinOp::Div(_)
| BinOp::Rem(_)
| BinOp::And(_)
| BinOp::Or(_)
| BinOp::BitXor(_)
| BinOp::BitAnd(_)
| BinOp::BitOr(_)
| BinOp::Shl(_)
| BinOp::Shr(_)
| BinOp::Eq(_)
| BinOp::Lt(_)
| BinOp::Le(_)
| BinOp::Ne(_)
| BinOp::Ge(_)
| BinOp::Gt(_) => false,
_ => unimplemented!("unknown BinOp"),
}
}
Expr::Group(group) => add_semi(&group.expr),
Expr::Array(_)
| Expr::Async(_)
| Expr::Await(_)
| Expr::Block(_)
| Expr::Call(_)
| Expr::Cast(_)
| Expr::Closure(_)
| Expr::Const(_)
| Expr::Field(_)
| Expr::ForLoop(_)
| Expr::If(_)
| Expr::Index(_)
| Expr::Infer(_)
| Expr::Let(_)
| Expr::Lit(_)
| Expr::Loop(_)
| Expr::Macro(_)
| Expr::Match(_)
| Expr::MethodCall(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::Range(_)
| Expr::RawAddr(_)
| Expr::Reference(_)
| Expr::Repeat(_)
| Expr::Struct(_)
| Expr::Try(_)
| Expr::TryBlock(_)
| Expr::Tuple(_)
| Expr::Unary(_)
| Expr::Unsafe(_)
| Expr::Verbatim(_)
| Expr::While(_) => false,
_ => false,
}
}
pub fn break_after(expr: &Expr) -> bool {
if let Expr::Group(group) = expr {
if let Expr::Verbatim(verbatim) = group.expr.as_ref() {
return !verbatim.is_empty();
}
}
true
}
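// `remove_semi` reports whether a written semicolon after this expression
// statement is redundant and can be dropped: after `for` and `while` loops,
// and after an `if` chain with no final `else` block.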
fn remove_semi(expr: &Expr) -> bool {
match expr {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Expr::ForLoop(_) | Expr::While(_) => true,
Expr::Group(group) => remove_semi(&group.expr),
Expr::If(expr) => match &expr.else_branch {
Some((_else_token, else_branch)) => remove_semi(else_branch),
None => true,
},
Expr::Array(_)
| Expr::Assign(_)
| Expr::Async(_)
| Expr::Await(_)
| Expr::Binary(_)
| Expr::Block(_)
| Expr::Break(_)
| Expr::Call(_)
| Expr::Cast(_)
| Expr::Closure(_)
| Expr::Continue(_)
| Expr::Const(_)
| Expr::Field(_)
| Expr::Index(_)
| Expr::Infer(_)
| Expr::Let(_)
| Expr::Lit(_)
| Expr::Loop(_)
| Expr::Macro(_)
| Expr::Match(_)
| Expr::MethodCall(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::Range(_)
| Expr::RawAddr(_)
| Expr::Reference(_)
| Expr::Repeat(_)
| Expr::Return(_)
| Expr::Struct(_)
| Expr::Try(_)
| Expr::TryBlock(_)
| Expr::Tuple(_)
| Expr::Unary(_)
| Expr::Unsafe(_)
| Expr::Verbatim(_)
| Expr::Yield(_) => false,
_ => false,
}
}

80
vendor/prettyplease/src/token.rs vendored Normal file
View File

@@ -0,0 +1,80 @@
use crate::algorithm::Printer;
use proc_macro2::{Delimiter, Ident, Literal, Spacing, TokenStream, TokenTree};
impl Printer {
pub fn single_token(&mut self, token: Token, group_contents: fn(&mut Self, TokenStream)) {
match token {
Token::Group(delimiter, stream) => self.token_group(delimiter, stream, group_contents),
Token::Ident(ident) => self.ident(&ident),
Token::Punct(ch, _spacing) => self.token_punct(ch),
Token::Literal(literal) => self.token_literal(&literal),
}
}
fn token_group(
&mut self,
delimiter: Delimiter,
stream: TokenStream,
group_contents: fn(&mut Self, TokenStream),
) {
self.delimiter_open(delimiter);
if !stream.is_empty() {
if delimiter == Delimiter::Brace {
self.space();
}
group_contents(self, stream);
if delimiter == Delimiter::Brace {
self.space();
}
}
self.delimiter_close(delimiter);
}
pub fn ident(&mut self, ident: &Ident) {
self.word(ident.to_string());
}
pub fn token_punct(&mut self, ch: char) {
self.word(ch.to_string());
}
pub fn token_literal(&mut self, literal: &Literal) {
self.word(literal.to_string());
}
pub fn delimiter_open(&mut self, delimiter: Delimiter) {
self.word(match delimiter {
Delimiter::Parenthesis => "(",
Delimiter::Brace => "{",
Delimiter::Bracket => "[",
Delimiter::None => return,
});
}
pub fn delimiter_close(&mut self, delimiter: Delimiter) {
self.word(match delimiter {
Delimiter::Parenthesis => ")",
Delimiter::Brace => "}",
Delimiter::Bracket => "]",
Delimiter::None => return,
});
}
}
pub enum Token {
Group(Delimiter, TokenStream),
Ident(Ident),
Punct(char, Spacing),
Literal(Literal),
}
impl From<TokenTree> for Token {
fn from(tt: TokenTree) -> Self {
match tt {
TokenTree::Group(group) => Token::Group(group.delimiter(), group.stream()),
TokenTree::Ident(ident) => Token::Ident(ident),
TokenTree::Punct(punct) => Token::Punct(punct.as_char(), punct.spacing()),
TokenTree::Literal(literal) => Token::Literal(literal),
}
}
}
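The From<TokenTree> impl above is what lets the printer walk arbitrary macro input. A minimal sketch of how it might be driven from inside this module (render_stream and demo are hypothetical helpers, not part of this file); Token and Printer are crate-internal, so this is not callable from outside the crate:

use std::str::FromStr;

// Hypothetical helper: print every token in a stream, recursing into groups
// with the same function so nested delimiters are handled uniformly.
fn render_stream(p: &mut Printer, stream: TokenStream) {
    for tt in stream {
        // `Token::from` is the conversion defined just above.
        p.single_token(Token::from(tt), render_stream);
    }
}

fn demo(p: &mut Printer) {
    // Unwrapping is fine here only because the input is known-good token syntax.
    let stream = TokenStream::from_str("vec![1, 2, 3]").unwrap();
    render_stream(p, stream);
}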

326
vendor/prettyplease/src/ty.rs vendored Normal file

@@ -0,0 +1,326 @@
use crate::algorithm::Printer;
use crate::fixup::FixupContext;
use crate::iter::IterDelimited;
use crate::path::PathKind;
use crate::INDENT;
use proc_macro2::TokenStream;
use syn::{
Abi, BareFnArg, BareVariadic, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference,
TypeSlice, TypeTraitObject, TypeTuple,
};
impl Printer {
pub fn ty(&mut self, ty: &Type) {
match ty {
#![cfg_attr(all(test, exhaustive), deny(non_exhaustive_omitted_patterns))]
Type::Array(ty) => self.type_array(ty),
Type::BareFn(ty) => self.type_bare_fn(ty),
Type::Group(ty) => self.type_group(ty),
Type::ImplTrait(ty) => self.type_impl_trait(ty),
Type::Infer(ty) => self.type_infer(ty),
Type::Macro(ty) => self.type_macro(ty),
Type::Never(ty) => self.type_never(ty),
Type::Paren(ty) => self.type_paren(ty),
Type::Path(ty) => self.type_path(ty),
Type::Ptr(ty) => self.type_ptr(ty),
Type::Reference(ty) => self.type_reference(ty),
Type::Slice(ty) => self.type_slice(ty),
Type::TraitObject(ty) => self.type_trait_object(ty),
Type::Tuple(ty) => self.type_tuple(ty),
Type::Verbatim(ty) => self.type_verbatim(ty),
_ => unimplemented!("unknown Type"),
}
}
fn type_array(&mut self, ty: &TypeArray) {
self.word("[");
self.ty(&ty.elem);
self.word("; ");
self.expr(&ty.len, FixupContext::NONE);
self.word("]");
}
fn type_bare_fn(&mut self, ty: &TypeBareFn) {
if let Some(bound_lifetimes) = &ty.lifetimes {
self.bound_lifetimes(bound_lifetimes);
}
if ty.unsafety.is_some() {
self.word("unsafe ");
}
if let Some(abi) = &ty.abi {
self.abi(abi);
}
self.word("fn(");
self.cbox(INDENT);
self.zerobreak();
for bare_fn_arg in ty.inputs.iter().delimited() {
self.bare_fn_arg(&bare_fn_arg);
self.trailing_comma(bare_fn_arg.is_last && ty.variadic.is_none());
}
if let Some(variadic) = &ty.variadic {
self.bare_variadic(variadic);
self.zerobreak();
}
self.offset(-INDENT);
self.end();
self.word(")");
self.return_type(&ty.output);
}
// A Group is an invisible delimiter (typically introduced by macro
// expansion); print only the inner type.
fn type_group(&mut self, ty: &TypeGroup) {
self.ty(&ty.elem);
}
fn type_impl_trait(&mut self, ty: &TypeImplTrait) {
self.word("impl ");
for type_param_bound in ty.bounds.iter().delimited() {
if !type_param_bound.is_first {
self.word(" + ");
}
self.type_param_bound(&type_param_bound);
}
}
fn type_infer(&mut self, ty: &TypeInfer) {
let _ = ty;
self.word("_");
}
fn type_macro(&mut self, ty: &TypeMacro) {
let semicolon = false;
self.mac(&ty.mac, None, semicolon);
}
fn type_never(&mut self, ty: &TypeNever) {
let _ = ty;
self.word("!");
}
fn type_paren(&mut self, ty: &TypeParen) {
self.word("(");
self.ty(&ty.elem);
self.word(")");
}
fn type_path(&mut self, ty: &TypePath) {
self.qpath(&ty.qself, &ty.path, PathKind::Type);
}
fn type_ptr(&mut self, ty: &TypePtr) {
self.word("*");
if ty.mutability.is_some() {
self.word("mut ");
} else {
self.word("const ");
}
self.ty(&ty.elem);
}
fn type_reference(&mut self, ty: &TypeReference) {
self.word("&");
if let Some(lifetime) = &ty.lifetime {
self.lifetime(lifetime);
self.nbsp();
}
if ty.mutability.is_some() {
self.word("mut ");
}
self.ty(&ty.elem);
}
fn type_slice(&mut self, ty: &TypeSlice) {
self.word("[");
self.ty(&ty.elem);
self.word("]");
}
fn type_trait_object(&mut self, ty: &TypeTraitObject) {
self.word("dyn ");
for type_param_bound in ty.bounds.iter().delimited() {
if !type_param_bound.is_first {
self.word(" + ");
}
self.type_param_bound(&type_param_bound);
}
}
fn type_tuple(&mut self, ty: &TypeTuple) {
self.word("(");
self.cbox(INDENT);
self.zerobreak();
for elem in ty.elems.iter().delimited() {
self.ty(&elem);
if ty.elems.len() == 1 {
// A one-element tuple keeps its trailing comma so it stays a tuple type
// rather than a parenthesized type.
self.word(",");
self.zerobreak();
} else {
self.trailing_comma(elem.is_last);
}
}
self.offset(-INDENT);
self.end();
self.word(")");
}
#[cfg(not(feature = "verbatim"))]
fn type_verbatim(&mut self, ty: &TokenStream) {
unimplemented!("Type::Verbatim `{}`", ty);
}
// With the "verbatim" feature enabled, recognize the token patterns that can
// appear as Type::Verbatim (`...`, anonymous `struct`/`union`, `dyn*` bounds,
// and `mut self` receivers) and print each; anything unrecognized falls
// through to unimplemented!.
#[cfg(feature = "verbatim")]
fn type_verbatim(&mut self, tokens: &TokenStream) {
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{token, FieldsNamed, Token, TypeParamBound};
enum TypeVerbatim {
Ellipsis,
AnonStruct(AnonStruct),
AnonUnion(AnonUnion),
DynStar(DynStar),
MutSelf(MutSelf),
}
struct AnonStruct {
fields: FieldsNamed,
}
struct AnonUnion {
fields: FieldsNamed,
}
struct DynStar {
bounds: Punctuated<TypeParamBound, Token![+]>,
}
struct MutSelf {
ty: Option<Type>,
}
impl Parse for TypeVerbatim {
fn parse(input: ParseStream) -> Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(Token![struct]) {
input.parse::<Token![struct]>()?;
let fields: FieldsNamed = input.parse()?;
Ok(TypeVerbatim::AnonStruct(AnonStruct { fields }))
} else if lookahead.peek(Token![union]) && input.peek2(token::Brace) {
input.parse::<Token![union]>()?;
let fields: FieldsNamed = input.parse()?;
Ok(TypeVerbatim::AnonUnion(AnonUnion { fields }))
} else if lookahead.peek(Token![dyn]) {
input.parse::<Token![dyn]>()?;
input.parse::<Token![*]>()?;
let bounds = input.parse_terminated(TypeParamBound::parse, Token![+])?;
Ok(TypeVerbatim::DynStar(DynStar { bounds }))
} else if lookahead.peek(Token![mut]) {
input.parse::<Token![mut]>()?;
input.parse::<Token![self]>()?;
let ty = if input.is_empty() {
None
} else {
input.parse::<Token![:]>()?;
let ty: Type = input.parse()?;
Some(ty)
};
Ok(TypeVerbatim::MutSelf(MutSelf { ty }))
} else if lookahead.peek(Token![...]) {
input.parse::<Token![...]>()?;
Ok(TypeVerbatim::Ellipsis)
} else {
Err(lookahead.error())
}
}
}
let ty: TypeVerbatim = match syn::parse2(tokens.clone()) {
Ok(ty) => ty,
Err(_) => unimplemented!("Type::Verbatim `{}`", tokens),
};
match ty {
TypeVerbatim::Ellipsis => {
self.word("...");
}
TypeVerbatim::AnonStruct(ty) => {
self.cbox(INDENT);
self.word("struct {");
self.hardbreak_if_nonempty();
for field in &ty.fields.named {
self.field(field);
self.word(",");
self.hardbreak();
}
self.offset(-INDENT);
self.end();
self.word("}");
}
TypeVerbatim::AnonUnion(ty) => {
self.cbox(INDENT);
self.word("union {");
self.hardbreak_if_nonempty();
for field in &ty.fields.named {
self.field(field);
self.word(",");
self.hardbreak();
}
self.offset(-INDENT);
self.end();
self.word("}");
}
TypeVerbatim::DynStar(ty) => {
self.word("dyn* ");
for type_param_bound in ty.bounds.iter().delimited() {
if !type_param_bound.is_first {
self.word(" + ");
}
self.type_param_bound(&type_param_bound);
}
}
TypeVerbatim::MutSelf(bare_fn_arg) => {
self.word("mut self");
if let Some(ty) = &bare_fn_arg.ty {
self.word(": ");
self.ty(ty);
}
}
}
}
pub fn return_type(&mut self, ty: &ReturnType) {
match ty {
ReturnType::Default => {}
ReturnType::Type(_arrow, ty) => {
self.word(" -> ");
self.ty(ty);
}
}
}
fn bare_fn_arg(&mut self, bare_fn_arg: &BareFnArg) {
self.outer_attrs(&bare_fn_arg.attrs);
if let Some((name, _colon)) = &bare_fn_arg.name {
self.ident(name);
self.word(": ");
}
self.ty(&bare_fn_arg.ty);
}
fn bare_variadic(&mut self, variadic: &BareVariadic) {
self.outer_attrs(&variadic.attrs);
if let Some((name, _colon)) = &variadic.name {
self.ident(name);
self.word(": ");
}
self.word("...");
}
pub fn abi(&mut self, abi: &Abi) {
self.word("extern ");
if let Some(name) = &abi.name {
self.lit_str(name);
self.nbsp();
}
}
}
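As with the expression helpers, these type printers are exercised through prettyplease's public unparse function. A minimal downstream sketch, assuming prettyplease and syn (with the "full" feature); the behaviour noted in the comments follows from reading the code above rather than from running this vendored copy:

// Hypothetical downstream usage, not part of the vendored file.
fn main() {
    let src = r#"
        type Callback = unsafe extern "C" fn(*const u8, usize) -> i32;
        type One = (u8,);
    "#;
    let file = syn::parse_file(src).expect("syntactically valid Rust");
    // `type_bare_fn` prints the unsafety, ABI, and argument list of `Callback`,
    // and `type_tuple` preserves the trailing comma that keeps `(u8,)` a tuple.
    print!("{}", prettyplease::unparse(&file));
}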