Vendor dependencies for 0.3.0 release

2025-09-27 10:29:08 -05:00
parent 0c8d39d483
commit 82ab7f317b
26803 changed files with 16134934 additions and 0 deletions

898
vendor/syn/tests/common/eq.rs vendored Normal file

@@ -0,0 +1,898 @@
#![allow(unused_macro_rules)]
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_span;
extern crate thin_vec;
use rustc_ast::ast::AngleBracketedArg;
use rustc_ast::ast::AngleBracketedArgs;
use rustc_ast::ast::AnonConst;
use rustc_ast::ast::Arm;
use rustc_ast::ast::AsmMacro;
use rustc_ast::ast::AssignOpKind;
use rustc_ast::ast::AssocItemConstraint;
use rustc_ast::ast::AssocItemConstraintKind;
use rustc_ast::ast::AssocItemKind;
use rustc_ast::ast::AttrArgs;
use rustc_ast::ast::AttrId;
use rustc_ast::ast::AttrItem;
use rustc_ast::ast::AttrKind;
use rustc_ast::ast::AttrStyle;
use rustc_ast::ast::Attribute;
use rustc_ast::ast::BinOpKind;
use rustc_ast::ast::BindingMode;
use rustc_ast::ast::Block;
use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
use rustc_ast::ast::BoundAsyncness;
use rustc_ast::ast::BoundConstness;
use rustc_ast::ast::BoundPolarity;
use rustc_ast::ast::ByRef;
use rustc_ast::ast::CaptureBy;
use rustc_ast::ast::Closure;
use rustc_ast::ast::ClosureBinder;
use rustc_ast::ast::Const;
use rustc_ast::ast::ConstItem;
use rustc_ast::ast::CoroutineKind;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
use rustc_ast::ast::Delegation;
use rustc_ast::ast::DelegationMac;
use rustc_ast::ast::DelimArgs;
use rustc_ast::ast::EnumDef;
use rustc_ast::ast::Expr;
use rustc_ast::ast::ExprField;
use rustc_ast::ast::ExprKind;
use rustc_ast::ast::Extern;
use rustc_ast::ast::FieldDef;
use rustc_ast::ast::FloatTy;
use rustc_ast::ast::Fn;
use rustc_ast::ast::FnContract;
use rustc_ast::ast::FnDecl;
use rustc_ast::ast::FnHeader;
use rustc_ast::ast::FnPtrTy;
use rustc_ast::ast::FnRetTy;
use rustc_ast::ast::FnSig;
use rustc_ast::ast::ForLoopKind;
use rustc_ast::ast::ForeignItemKind;
use rustc_ast::ast::ForeignMod;
use rustc_ast::ast::FormatAlignment;
use rustc_ast::ast::FormatArgPosition;
use rustc_ast::ast::FormatArgPositionKind;
use rustc_ast::ast::FormatArgs;
use rustc_ast::ast::FormatArgsPiece;
use rustc_ast::ast::FormatArgument;
use rustc_ast::ast::FormatArgumentKind;
use rustc_ast::ast::FormatArguments;
use rustc_ast::ast::FormatCount;
use rustc_ast::ast::FormatDebugHex;
use rustc_ast::ast::FormatOptions;
use rustc_ast::ast::FormatPlaceholder;
use rustc_ast::ast::FormatSign;
use rustc_ast::ast::FormatTrait;
use rustc_ast::ast::GenBlockKind;
use rustc_ast::ast::GenericArg;
use rustc_ast::ast::GenericArgs;
use rustc_ast::ast::GenericBound;
use rustc_ast::ast::GenericParam;
use rustc_ast::ast::GenericParamKind;
use rustc_ast::ast::Generics;
use rustc_ast::ast::Impl;
use rustc_ast::ast::ImplPolarity;
use rustc_ast::ast::Inline;
use rustc_ast::ast::InlineAsm;
use rustc_ast::ast::InlineAsmOperand;
use rustc_ast::ast::InlineAsmOptions;
use rustc_ast::ast::InlineAsmRegOrRegClass;
use rustc_ast::ast::InlineAsmSym;
use rustc_ast::ast::InlineAsmTemplatePiece;
use rustc_ast::ast::IntTy;
use rustc_ast::ast::IsAuto;
use rustc_ast::ast::Item;
use rustc_ast::ast::ItemKind;
use rustc_ast::ast::Label;
use rustc_ast::ast::Lifetime;
use rustc_ast::ast::LitFloatType;
use rustc_ast::ast::LitIntType;
use rustc_ast::ast::LitKind;
use rustc_ast::ast::Local;
use rustc_ast::ast::LocalKind;
use rustc_ast::ast::MacCall;
use rustc_ast::ast::MacCallStmt;
use rustc_ast::ast::MacStmtStyle;
use rustc_ast::ast::MacroDef;
use rustc_ast::ast::MatchKind;
use rustc_ast::ast::MetaItem;
use rustc_ast::ast::MetaItemInner;
use rustc_ast::ast::MetaItemKind;
use rustc_ast::ast::MetaItemLit;
use rustc_ast::ast::MethodCall;
use rustc_ast::ast::ModKind;
use rustc_ast::ast::ModSpans;
use rustc_ast::ast::Movability;
use rustc_ast::ast::MutTy;
use rustc_ast::ast::Mutability;
use rustc_ast::ast::NodeId;
use rustc_ast::ast::NormalAttr;
use rustc_ast::ast::Param;
use rustc_ast::ast::Parens;
use rustc_ast::ast::ParenthesizedArgs;
use rustc_ast::ast::Pat;
use rustc_ast::ast::PatField;
use rustc_ast::ast::PatFieldsRest;
use rustc_ast::ast::PatKind;
use rustc_ast::ast::Path;
use rustc_ast::ast::PathSegment;
use rustc_ast::ast::PolyTraitRef;
use rustc_ast::ast::PreciseCapturingArg;
use rustc_ast::ast::QSelf;
use rustc_ast::ast::RangeEnd;
use rustc_ast::ast::RangeLimits;
use rustc_ast::ast::RangeSyntax;
use rustc_ast::ast::Recovered;
use rustc_ast::ast::Safety;
use rustc_ast::ast::StaticItem;
use rustc_ast::ast::Stmt;
use rustc_ast::ast::StmtKind;
use rustc_ast::ast::StrLit;
use rustc_ast::ast::StrStyle;
use rustc_ast::ast::StructExpr;
use rustc_ast::ast::StructRest;
use rustc_ast::ast::Term;
use rustc_ast::ast::Trait;
use rustc_ast::ast::TraitBoundModifiers;
use rustc_ast::ast::TraitImplHeader;
use rustc_ast::ast::TraitObjectSyntax;
use rustc_ast::ast::TraitRef;
use rustc_ast::ast::Ty;
use rustc_ast::ast::TyAlias;
use rustc_ast::ast::TyAliasWhereClause;
use rustc_ast::ast::TyAliasWhereClauses;
use rustc_ast::ast::TyKind;
use rustc_ast::ast::TyPat;
use rustc_ast::ast::TyPatKind;
use rustc_ast::ast::UintTy;
use rustc_ast::ast::UnOp;
use rustc_ast::ast::UnsafeBinderCastKind;
use rustc_ast::ast::UnsafeBinderTy;
use rustc_ast::ast::UnsafeSource;
use rustc_ast::ast::UseTree;
use rustc_ast::ast::UseTreeKind;
use rustc_ast::ast::Variant;
use rustc_ast::ast::VariantData;
use rustc_ast::ast::Visibility;
use rustc_ast::ast::VisibilityKind;
use rustc_ast::ast::WhereBoundPredicate;
use rustc_ast::ast::WhereClause;
use rustc_ast::ast::WhereEqPredicate;
use rustc_ast::ast::WherePredicate;
use rustc_ast::ast::WherePredicateKind;
use rustc_ast::ast::WhereRegionPredicate;
use rustc_ast::ast::YieldKind;
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Lit, Token, TokenKind};
use rustc_ast::tokenstream::{
AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream,
Spacing, TokenStream, TokenTree,
};
use rustc_data_structures::packed::Pu128;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, ByteSymbol, Ident, Symbol};
use rustc_span::{ErrorGuaranteed, Span, SyntaxContext, DUMMY_SP};
use std::borrow::Cow;
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use std::sync::Arc;
use thin_vec::ThinVec;
pub trait SpanlessEq {
fn eq(&self, other: &Self) -> bool;
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for Box<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: ?Sized + SpanlessEq> SpanlessEq for Arc<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&**self, &**other)
}
}
impl<T: SpanlessEq> SpanlessEq for Option<T> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(None, None) => true,
(Some(this), Some(other)) => SpanlessEq::eq(this, other),
_ => false,
}
}
}
impl<T: SpanlessEq, E: SpanlessEq> SpanlessEq for Result<T, E> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Ok(this), Ok(other)) => SpanlessEq::eq(this, other),
(Err(this), Err(other)) => SpanlessEq::eq(this, other),
_ => false,
}
}
}
impl<T: SpanlessEq> SpanlessEq for [T] {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
}
}
impl<T: SpanlessEq> SpanlessEq for Vec<T> {
fn eq(&self, other: &Self) -> bool {
<[T] as SpanlessEq>::eq(self, other)
}
}
impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len()
&& self
.iter()
.zip(other.iter())
.all(|(a, b)| SpanlessEq::eq(a, b))
}
}
impl<K: Eq + Hash, V: SpanlessEq, S: BuildHasher> SpanlessEq for HashMap<K, V, S> {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len()
&& self.iter().all(|(key, this_v)| {
other
.get(key)
.map_or(false, |other_v| SpanlessEq::eq(this_v, other_v))
})
}
}
impl<'a, T: ?Sized + ToOwned + SpanlessEq> SpanlessEq for Cow<'a, T> {
fn eq(&self, other: &Self) -> bool {
<T as SpanlessEq>::eq(self, other)
}
}
impl<T: SpanlessEq> SpanlessEq for Spanned<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.node, &other.node)
}
}
impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0) && SpanlessEq::eq(&self.1, &other.1)
}
}
impl<A: SpanlessEq, B: SpanlessEq, C: SpanlessEq> SpanlessEq for (A, B, C) {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.0, &other.0)
&& SpanlessEq::eq(&self.1, &other.1)
&& SpanlessEq::eq(&self.2, &other.2)
}
}
macro_rules! spanless_eq_true {
($name:ty) => {
impl SpanlessEq for $name {
fn eq(&self, _other: &Self) -> bool {
true
}
}
};
}
spanless_eq_true!(Span);
spanless_eq_true!(DelimSpan);
spanless_eq_true!(AttrId);
spanless_eq_true!(NodeId);
spanless_eq_true!(SyntaxContext);
spanless_eq_true!(Spacing);
macro_rules! spanless_eq_partial_eq {
($name:ty) => {
impl SpanlessEq for $name {
fn eq(&self, other: &Self) -> bool {
PartialEq::eq(self, other)
}
}
};
}
spanless_eq_partial_eq!(());
spanless_eq_partial_eq!(bool);
spanless_eq_partial_eq!(u8);
spanless_eq_partial_eq!(u16);
spanless_eq_partial_eq!(u32);
spanless_eq_partial_eq!(u128);
spanless_eq_partial_eq!(usize);
spanless_eq_partial_eq!(char);
spanless_eq_partial_eq!(str);
spanless_eq_partial_eq!(String);
spanless_eq_partial_eq!(Pu128);
spanless_eq_partial_eq!(Symbol);
spanless_eq_partial_eq!(ByteSymbol);
spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(Delimiter);
spanless_eq_partial_eq!(InlineAsmOptions);
spanless_eq_partial_eq!(token::LitKind);
spanless_eq_partial_eq!(ErrorGuaranteed);
macro_rules! spanless_eq_struct {
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
} => {
impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* {
fn eq(&self, other: &Self) -> bool {
let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self;
let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other;
true $(&& SpanlessEq::eq($this, $other))*
}
}
};
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
!$next:tt
$($rest:tt)*
} => {
spanless_eq_struct! {
$($name)::+ $(<$param>)*
$([$field $this $other])*
$(![$ignore])*
![$next];
$($rest)*
}
};
{
$($name:ident)::+ $(<$param:ident>)?
$([$field:tt $this:ident $other:ident])*
$(![$ignore:tt])*;
$next:tt
$($rest:tt)*
} => {
spanless_eq_struct! {
$($name)::+ $(<$param>)*
$([$field $this $other])*
[$next this other]
$(![$ignore])*;
$($rest)*
}
};
}
macro_rules! spanless_eq_enum {
{
$($name:ident)::+;
$([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])*
} => {
impl SpanlessEq for $($name)::+ {
fn eq(&self, other: &Self) -> bool {
match self {
$(
$($variant)::+ { .. } => {}
)*
}
#[allow(unreachable_patterns)]
match (self, other) {
$(
(
$($variant)::+ { $($field: $this,)* $($ignore: _,)* },
$($variant)::+ { $($field: $other,)* $($ignore: _,)* },
) => {
true $(&& SpanlessEq::eq($this, $other))*
}
)*
_ => false,
}
}
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident [$($named:tt)*] ()
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
[$($name)::+::$next; $($named)*]
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident ($($field:tt)*)
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
$next [] ($($field)*)
$($rest)*
}
};
{
$($name:ident)::+;
$([$($variant:ident)::+; $($fields:tt)*])*
$next:ident
$($rest:tt)*
} => {
spanless_eq_enum! {
$($name)::+;
$([$($variant)::+; $($fields)*])*
[$($name)::+::$next;]
$($rest)*
}
};
}
spanless_eq_struct!(AngleBracketedArgs; span args);
spanless_eq_struct!(AnonConst; id value);
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
spanless_eq_struct!(AssocItemConstraint; id ident gen_args kind span);
spanless_eq_struct!(AttrItem; unsafety path args tokens);
spanless_eq_struct!(AttrTokenStream; 0);
spanless_eq_struct!(Attribute; kind id style span);
spanless_eq_struct!(AttrsTarget; attrs tokens);
spanless_eq_struct!(BindingMode; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens);
spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span);
spanless_eq_struct!(ConstItem; defaultness ident generics ty expr define_opaque);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(Delegation; id qself path ident rename body from_glob);
spanless_eq_struct!(DelegationMac; qself prefix suffixes body);
spanless_eq_struct!(DelimArgs; dspan delim tokens);
spanless_eq_struct!(DelimSpacing; open close);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
spanless_eq_struct!(FieldDef; attrs id span vis safety ident ty default is_placeholder);
spanless_eq_struct!(Fn; defaultness ident generics sig contract define_opaque body);
spanless_eq_struct!(FnContract; requires ensures);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext);
spanless_eq_struct!(FnPtrTy; safety ext generic_params decl decl_span);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; extern_span safety abi items);
spanless_eq_struct!(FormatArgPosition; index kind span);
spanless_eq_struct!(FormatArgs; span template arguments uncooked_fmt_str is_source_literal);
spanless_eq_struct!(FormatArgument; kind expr);
spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex);
spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options);
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
spanless_eq_struct!(Generics; params where_clause span);
spanless_eq_struct!(Impl; generics of_trait self_ty items);
spanless_eq_struct!(InlineAsm; asm_macro template template_strs operands clobber_abis options line_spans);
spanless_eq_struct!(InlineAsmSym; id qself path);
spanless_eq_struct!(Item<K>; attrs id span vis kind !tokens);
spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
spanless_eq_struct!(Lit; kind symbol suffix);
spanless_eq_struct!(Local; id super_ pat ty kind span colon_sp attrs !tokens);
spanless_eq_struct!(MacCall; path args);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(MetaItem; unsafety path kind span);
spanless_eq_struct!(MetaItemLit; symbol suffix kind span);
spanless_eq_struct!(MethodCall; seg receiver args !span);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
spanless_eq_struct!(NormalAttr; item tokens);
spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
spanless_eq_struct!(Pat; id kind span tokens);
spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
spanless_eq_struct!(Path; span segments tokens);
spanless_eq_struct!(PathSegment; ident id args);
spanless_eq_struct!(PolyTraitRef; bound_generic_params modifiers trait_ref span parens);
spanless_eq_struct!(QSelf; ty path_span position);
spanless_eq_struct!(StaticItem; ident ty safety mutability expr define_opaque);
spanless_eq_struct!(Stmt; id kind span);
spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span);
spanless_eq_struct!(StructExpr; qself path fields rest);
spanless_eq_struct!(Token; kind span);
spanless_eq_struct!(Trait; constness safety is_auto ident generics bounds items);
spanless_eq_struct!(TraitBoundModifiers; constness asyncness polarity);
spanless_eq_struct!(TraitImplHeader; defaultness safety constness polarity trait_ref);
spanless_eq_struct!(TraitRef; path ref_id);
spanless_eq_struct!(Ty; id kind span tokens);
spanless_eq_struct!(TyAlias; defaultness ident generics where_clauses bounds ty);
spanless_eq_struct!(TyAliasWhereClause; !has_where_token span);
spanless_eq_struct!(TyAliasWhereClauses; before after !split);
spanless_eq_struct!(TyPat; id kind span tokens);
spanless_eq_struct!(UnsafeBinderTy; generic_params inner_ty);
spanless_eq_struct!(UseTree; prefix kind span);
spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
spanless_eq_struct!(Visibility; kind span tokens);
spanless_eq_struct!(WhereBoundPredicate; bound_generic_params bounded_ty bounds);
spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; lhs_ty rhs_ty);
spanless_eq_struct!(WherePredicate; attrs kind id span is_placeholder);
spanless_eq_struct!(WhereRegionPredicate; lifetime bounds);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AsmMacro; Asm GlobalAsm NakedAsm);
spanless_eq_enum!(AssocItemConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0) Delegation(0) DelegationMac(0));
spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(eq_span expr));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) AttrsTarget(0));
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw Pin);
spanless_eq_enum!(BoundAsyncness; Normal Async(0));
spanless_eq_enum!(BoundConstness; Never Always(0) Maybe(0));
spanless_eq_enum!(BoundPolarity; Positive Negative(0) Maybe(0));
spanless_eq_enum!(ByRef; Yes(0) No);
spanless_eq_enum!(CaptureBy; Value(move_kw) Ref Use(use_kw));
spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
spanless_eq_enum!(FloatTy; F16 F32 F64 F128);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForLoopKind; For ForAwait);
spanless_eq_enum!(ForeignItemKind; Static(0) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(FormatAlignment; Left Right Center);
spanless_eq_enum!(FormatArgPositionKind; Implicit Number Named);
spanless_eq_enum!(FormatArgsPiece; Literal(0) Placeholder(0));
spanless_eq_enum!(FormatArgumentKind; Normal Named(0) Captured(0));
spanless_eq_enum!(FormatCount; Literal(0) Argument(0));
spanless_eq_enum!(FormatDebugHex; Lower Upper);
spanless_eq_enum!(FormatSign; Plus Minus);
spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex);
spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen);
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0) ParenthesizedElided(0));
spanless_eq_enum!(GenericBound; Trait(0) Outlives(0) Use(0 1));
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty span default));
spanless_eq_enum!(ImplPolarity; Positive Negative(0));
spanless_eq_enum!(Inline; Yes No);
spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(MatchKind; Prefix Postfix);
spanless_eq_enum!(MetaItemKind; Word List(0) NameValue(0));
spanless_eq_enum!(MetaItemInner; MetaItem(0) Lit(0));
spanless_eq_enum!(ModKind; Loaded(0 1 2 3) Unloaded);
spanless_eq_enum!(Movability; Static Movable);
spanless_eq_enum!(Mutability; Mut Not);
spanless_eq_enum!(Parens; Yes No);
spanless_eq_enum!(PatFieldsRest; Rest Recovered(0) None);
spanless_eq_enum!(PreciseCapturingArg; Lifetime(0) Arg(0 1));
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
spanless_eq_enum!(Recovered; No Yes(0));
spanless_eq_enum!(Safety; Unsafe(0) Safe(0) Default);
spanless_eq_enum!(StmtKind; Let(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3));
spanless_eq_enum!(TraitObjectSyntax; Dyn None);
spanless_eq_enum!(TyPatKind; Range(0 1 2) Or(0) Err(0));
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(UnsafeBinderCastKind; Wrap Unwrap);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(UseTreeKind; Simple(0) Nested(items span) Glob);
spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicateKind; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(YieldKind; Prefix(0) Postfix(0));
spanless_eq_enum!(AssignOpKind; AddAssign SubAssign MulAssign DivAssign
RemAssign BitXorAssign BitAndAssign BitOrAssign ShlAssign ShrAssign);
spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id)
Gen(span closure_id return_impl_trait_id)
AsyncGen(span closure_id return_impl_trait_id));
spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2)
Match(0 1 2) Closure(0) Block(0 1) Gen(0 1 2 3) Await(0 1) Use(0 1)
TryBlock(0) Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore
Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0)
Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0)
UnsafeBinderCast(0 1 2) Err(0) Dummy);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym) Label(block));
spanless_eq_enum!(ItemKind; ExternCrate(0 1) Use(0) Static(0) Const(0) Fn(0)
Mod(0 1 2) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1 2) Struct(0 1 2)
Union(0 1 2) Trait(0) TraitAlias(0 1 2) Impl(0) MacCall(0) MacroDef(0 1)
Delegation(0) DelegationMac(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0)
Int(0 1) Float(0 1) Bool(0) Err(0));
spanless_eq_enum!(PatKind; Missing Wild Ident(0 1 2) Struct(0 1 2 3)
TupleStruct(0 1 2) Or(0) Path(0 1) Tuple(0) Box(0) Deref(0) Ref(0 1) Expr(0)
Range(0 1 2) Slice(0) Rest Never Guard(0 1) Paren(0) MacCall(0) Err(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) PinnedRef(0 1)
FnPtr(0) UnsafeBinder(0) Never Tup(0) Path(0 1) TraitObject(0 1)
ImplTrait(0 1) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) CVarArgs
Pat(0 1) Dummy Err(0));
impl SpanlessEq for Ident {
fn eq(&self, other: &Self) -> bool {
self.as_str() == other.as_str()
}
}
impl SpanlessEq for RangeSyntax {
fn eq(&self, _other: &Self) -> bool {
match self {
RangeSyntax::DotDotDot | RangeSyntax::DotDotEq => true,
}
}
}
impl SpanlessEq for Param {
fn eq(&self, other: &Self) -> bool {
let Param {
attrs,
ty,
pat,
id,
span: _,
is_placeholder,
} = self;
let Param {
attrs: attrs2,
ty: ty2,
pat: pat2,
id: id2,
span: _,
is_placeholder: is_placeholder2,
} = other;
SpanlessEq::eq(id, id2)
&& SpanlessEq::eq(is_placeholder, is_placeholder2)
&& (matches!(ty.kind, TyKind::Err(_))
|| matches!(ty2.kind, TyKind::Err(_))
|| SpanlessEq::eq(attrs, attrs2)
&& SpanlessEq::eq(ty, ty2)
&& SpanlessEq::eq(pat, pat2))
}
}
impl SpanlessEq for TokenKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
(TokenKind::DotDotEq | TokenKind::DotDotDot, _) => match other {
TokenKind::DotDotEq | TokenKind::DotDotDot => true,
_ => false,
},
_ => self == other,
}
}
}
impl SpanlessEq for TokenStream {
fn eq(&self, other: &Self) -> bool {
let mut this_trees = self.iter();
let mut other_trees = other.iter();
loop {
let this = match this_trees.next() {
None => return other_trees.next().is_none(),
Some(tree) => tree,
};
let other = match other_trees.next() {
None => return false,
Some(tree) => tree,
};
if SpanlessEq::eq(this, other) {
continue;
}
if let (TokenTree::Token(this, _), TokenTree::Token(other, _)) = (this, other) {
if match (&this.kind, &other.kind) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => {
SpanlessEq::eq(this, other)
}
(TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => {
doc_comment(*style, *symbol, &mut other_trees)
}
(TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => {
doc_comment(*style, *symbol, &mut this_trees)
}
_ => false,
} {
continue;
}
}
return false;
}
}
}
fn doc_comment<'a>(
style: AttrStyle,
unescaped: Symbol,
trees: &mut impl Iterator<Item = &'a TokenTree>,
) -> bool {
if match style {
AttrStyle::Outer => false,
AttrStyle::Inner => true,
} {
match trees.next() {
Some(TokenTree::Token(
Token {
kind: TokenKind::Bang,
span: _,
},
_spacing,
)) => {}
_ => return false,
}
}
let stream = match trees.next() {
Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) => stream,
_ => return false,
};
let mut trees = stream.iter();
match trees.next() {
Some(TokenTree::Token(
Token {
kind: TokenKind::Ident(symbol, IdentIsRaw::No),
span: _,
},
_spacing,
)) if *symbol == sym::doc => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(
Token {
kind: TokenKind::Eq,
span: _,
},
_spacing,
)) => {}
_ => return false,
}
match trees.next() {
Some(TokenTree::Token(token, _spacing)) => {
is_escaped_literal_token(token, unescaped) && trees.next().is_none()
}
_ => false,
}
}
fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
match token {
Token {
kind: TokenKind::Literal(lit),
span: _,
} => match MetaItemLit::from_token_lit(*lit, DUMMY_SP) {
Ok(lit) => is_escaped_literal_meta_item_lit(&lit, unescaped),
Err(_) => false,
},
_ => false,
}
}
fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
match lit {
MetaItemLit {
symbol: _,
suffix: None,
kind,
span: _,
} => is_escaped_lit_kind(kind, unescaped),
_ => false,
}
}
fn is_escaped_lit(lit: &Lit, unescaped: Symbol) -> bool {
match lit {
Lit {
kind: token::LitKind::Str,
symbol: _,
suffix: None,
} => match LitKind::from_token_lit(*lit) {
Ok(lit_kind) => is_escaped_lit_kind(&lit_kind, unescaped),
_ => false,
},
_ => false,
}
}
fn is_escaped_lit_kind(kind: &LitKind, unescaped: Symbol) -> bool {
match kind {
LitKind::Str(symbol, StrStyle::Cooked) => {
symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', "")
}
_ => false,
}
}
impl SpanlessEq for LazyAttrTokenStream {
fn eq(&self, other: &Self) -> bool {
let this = self.to_attr_token_stream();
let other = other.to_attr_token_stream();
SpanlessEq::eq(&this, &other)
}
}
impl SpanlessEq for AttrKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(AttrKind::Normal(normal), AttrKind::Normal(normal2)) => {
SpanlessEq::eq(normal, normal2)
}
(AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
}
(AttrKind::DocComment(kind, unescaped), AttrKind::Normal(normal2)) => {
match kind {
CommentKind::Line | CommentKind::Block => {}
}
let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
SpanlessEq::eq(&path, &normal2.item.path)
&& match &normal2.item.args {
AttrArgs::Empty | AttrArgs::Delimited(_) => false,
AttrArgs::Eq { eq_span: _, expr } => match &expr.kind {
ExprKind::Lit(lit) => is_escaped_lit(lit, *unescaped),
_ => false,
},
}
}
(AttrKind::Normal(_), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
}
}
}
impl SpanlessEq for FormatArguments {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(self.all_args(), other.all_args())
}
}
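(Editor's note, not part of the vendored file.) The long run of macro invocations above generates one SpanlessEq impl per rustc AST type: every listed field is compared structurally, fields prefixed with `!` are ignored, and spans, NodeIds, and similar bookkeeping compare equal unconditionally via spanless_eq_true!. For a one-field type such as Label, `spanless_eq_struct!(Label; ident);` expands to roughly:

impl SpanlessEq for Label {
    fn eq(&self, other: &Self) -> bool {
        let Label { ident: this } = self;
        let Label { ident: other } = other;
        true && SpanlessEq::eq(this, other)
    }
}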

6
vendor/syn/tests/common/mod.rs vendored Normal file

@@ -0,0 +1,6 @@
#![allow(dead_code)]
#![allow(clippy::module_name_repetitions, clippy::shadow_unrelated)]
pub mod eq;
pub mod parse;
pub mod visit;

45
vendor/syn/tests/common/parse.rs vendored Normal file

@@ -0,0 +1,45 @@
extern crate rustc_ast;
extern crate rustc_driver;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use rustc_ast::ast;
use rustc_session::parse::ParseSess;
use rustc_span::FileName;
use std::panic;
pub fn librustc_expr(input: &str) -> Option<Box<ast::Expr>> {
match panic::catch_unwind(|| {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
let sess = ParseSess::new(locale_resources);
let name = FileName::Custom("test_precedence".to_string());
let mut parser = parse::new_parser_from_source_str(&sess, name, input.to_string()).unwrap();
let presult = parser.parse_expr();
match presult {
Ok(expr) => Some(expr),
Err(diagnostic) => {
diagnostic.emit();
None
}
}
}) {
Ok(Some(e)) => Some(e),
Ok(None) => None,
Err(_) => {
errorf!("librustc panicked\n");
None
}
}
}
pub fn syn_expr(input: &str) -> Option<syn::Expr> {
match syn::parse_str(input) {
Ok(e) => Some(e),
Err(msg) => {
errorf!("syn failed to parse\n{:?}\n", msg);
None
}
}
}
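(Editor's note, not part of the vendored file.) The callers of these two helpers live elsewhere in syn's test suite and are not in this diff. A minimal usage sketch, written as if it sat inside this module:

// Illustrative only: parse one snippet through both front ends and report
// whether both accepted it.
fn parses_in_both(input: &str) -> bool {
    librustc_expr(input).is_some() && syn_expr(input).is_some()
}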

119
vendor/syn/tests/common/visit.rs vendored Normal file

@@ -0,0 +1,119 @@
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use std::mem;
use syn::visit_mut::{self, VisitMut};
use syn::{Expr, File, Generics, LifetimeParam, MacroDelimiter, Stmt, StmtMacro, TypeParam};
pub struct FlattenParens {
discard_paren_attrs: bool,
}
impl FlattenParens {
pub fn discard_attrs() -> Self {
FlattenParens {
discard_paren_attrs: true,
}
}
pub fn combine_attrs() -> Self {
FlattenParens {
discard_paren_attrs: false,
}
}
pub fn visit_token_stream_mut(tokens: &mut TokenStream) {
*tokens = mem::take(tokens)
.into_iter()
.flat_map(|tt| {
if let TokenTree::Group(group) = tt {
let delimiter = group.delimiter();
let mut content = group.stream();
Self::visit_token_stream_mut(&mut content);
if let Delimiter::Parenthesis = delimiter {
content
} else {
TokenStream::from(TokenTree::Group(Group::new(delimiter, content)))
}
} else {
TokenStream::from(tt)
}
})
.collect();
}
}
impl VisitMut for FlattenParens {
fn visit_expr_mut(&mut self, e: &mut Expr) {
while let Expr::Paren(paren) = e {
let paren_attrs = mem::take(&mut paren.attrs);
*e = mem::replace(&mut *paren.expr, Expr::PLACEHOLDER);
if !paren_attrs.is_empty() && !self.discard_paren_attrs {
let nested_attrs = match e {
Expr::Assign(e) => &mut e.attrs,
Expr::Binary(e) => &mut e.attrs,
Expr::Cast(e) => &mut e.attrs,
_ => unimplemented!(),
};
assert!(nested_attrs.is_empty());
*nested_attrs = paren_attrs;
}
}
visit_mut::visit_expr_mut(self, e);
}
}
pub struct AsIfPrinted;
impl VisitMut for AsIfPrinted {
fn visit_file_mut(&mut self, file: &mut File) {
file.shebang = None;
visit_mut::visit_file_mut(self, file);
}
fn visit_generics_mut(&mut self, generics: &mut Generics) {
if generics.params.is_empty() {
generics.lt_token = None;
generics.gt_token = None;
}
if let Some(where_clause) = &generics.where_clause {
if where_clause.predicates.is_empty() {
generics.where_clause = None;
}
}
visit_mut::visit_generics_mut(self, generics);
}
fn visit_lifetime_param_mut(&mut self, param: &mut LifetimeParam) {
if param.bounds.is_empty() {
param.colon_token = None;
}
visit_mut::visit_lifetime_param_mut(self, param);
}
fn visit_stmt_mut(&mut self, stmt: &mut Stmt) {
if let Stmt::Expr(expr, semi) = stmt {
if let Expr::Macro(e) = expr {
if match e.mac.delimiter {
MacroDelimiter::Brace(_) => true,
MacroDelimiter::Paren(_) | MacroDelimiter::Bracket(_) => semi.is_some(),
} {
let Expr::Macro(expr) = mem::replace(expr, Expr::PLACEHOLDER) else {
unreachable!();
};
*stmt = Stmt::Macro(StmtMacro {
attrs: expr.attrs,
mac: expr.mac,
semi_token: *semi,
});
}
}
}
visit_mut::visit_stmt_mut(self, stmt);
}
fn visit_type_param_mut(&mut self, param: &mut TypeParam) {
if param.bounds.is_empty() {
param.colon_token = None;
}
visit_mut::visit_type_param_mut(self, param);
}
}
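(Editor's note, not part of the vendored file.) A usage sketch, written as if inside this module and assuming syn's visit-mut feature as used by these tests: FlattenParens strips Expr::Paren layers in place, and AsIfPrinted drops what a printer would not emit (shebangs, empty generics brackets, empty where-clauses), so a tree can be compared against a reparse of its printed form.

// Illustrative only: normalize a parsed file before structural comparison.
fn normalize(file: &mut syn::File) {
    FlattenParens::combine_attrs().visit_file_mut(file);
    AsIfPrinted.visit_file_mut(file);
}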

5239
vendor/syn/tests/debug/gen.rs vendored Normal file

File diff suppressed because it is too large

147
vendor/syn/tests/debug/mod.rs vendored Normal file

@@ -0,0 +1,147 @@
#![allow(
clippy::no_effect_underscore_binding,
clippy::too_many_lines,
clippy::used_underscore_binding
)]
#[rustfmt::skip]
mod gen;
use proc_macro2::{Ident, Literal, TokenStream};
use ref_cast::RefCast;
use std::fmt::{self, Debug};
use std::ops::Deref;
use syn::punctuated::Punctuated;
#[derive(RefCast)]
#[repr(transparent)]
pub struct Lite<T: ?Sized> {
value: T,
}
#[allow(non_snake_case)]
pub fn Lite<T: ?Sized>(value: &T) -> &Lite<T> {
Lite::ref_cast(value)
}
impl<T: ?Sized> Deref for Lite<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.value
}
}
impl Debug for Lite<bool> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<u32> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<usize> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<String> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{:?}", self.value)
}
}
impl Debug for Lite<Ident> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{:?}", self.value.to_string())
}
}
impl Debug for Lite<Literal> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}", self.value)
}
}
impl Debug for Lite<TokenStream> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let string = self.value.to_string();
if string.len() <= 80 {
write!(formatter, "TokenStream(`{}`)", self.value)
} else {
formatter
.debug_tuple("TokenStream")
.field(&format_args!("`{}`", string))
.finish()
}
}
}
impl<T> Debug for Lite<&T>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(self.value), formatter)
}
}
impl<T> Debug for Lite<Box<T>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(Lite(&*self.value), formatter)
}
}
impl<T> Debug for Lite<Vec<T>>
where
Lite<T>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter
.debug_list()
.entries(self.value.iter().map(Lite))
.finish()
}
}
impl<T, P> Debug for Lite<Punctuated<T, P>>
where
Lite<T>: Debug,
Lite<P>: Debug,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
let mut list = formatter.debug_list();
for pair in self.pairs() {
let (node, punct) = pair.into_tuple();
list.entry(Lite(node));
list.entries(punct.map(Lite));
}
list.finish()
}
}
struct Present;
impl Debug for Present {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Some")
}
}
struct Option {
present: bool,
}
impl Debug for Option {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(if self.present { "Some" } else { "None" })
}
}
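(Editor's note, not part of the vendored file.) Lite<T> is a transparent wrapper whose Debug impls, together with those generated in debug/gen.rs (diff suppressed above), are expected to cover syn's syntax-tree types and print a more compact representation than the derived Debug output. A usage sketch:

// Illustrative only: dump the compact Lite-based debug view of an expression.
fn dump(expr: &syn::Expr) {
    println!("{:#?}", Lite(expr));
}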

7
vendor/syn/tests/macros/mod.rs vendored Normal file

@@ -0,0 +1,7 @@
macro_rules! errorf {
($($tt:tt)*) => {{
use ::std::io::Write;
let stderr = ::std::io::stderr();
write!(stderr.lock(), $($tt)*).unwrap();
}};
}

5
vendor/syn/tests/regression.rs vendored Normal file

@@ -0,0 +1,5 @@
#![allow(clippy::let_underscore_untyped, clippy::uninlined_format_args)]
mod regression {
automod::dir!("tests/regression");
}
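(Editor's note.) automod::dir! expands to one module declaration per .rs file under tests/regression, so this is roughly equivalent to writing the list out by hand; judging from the test shown below, at least:

mod issue1108;
// ...plus one `mod` item per additional file under tests/regression
// (the per-file headers for the regression files are not shown in this diff)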


@@ -0,0 +1,5 @@
#[test]
fn issue1108() {
let data = "impl<x<>>::x for";
let _ = syn::parse_file(data);
}


@@ -0,0 +1,32 @@
use proc_macro2::{Delimiter, Group};
use quote::quote;
#[test]
fn main() {
// Okay. Rustc allows top-level `static` with no value syntactically, but
// not semantically. Syn parses as Item::Verbatim.
let tokens = quote! {
pub static FOO: usize;
pub static BAR: usize;
};
let file = syn::parse2::<syn::File>(tokens).unwrap();
println!("{:#?}", file);
// Okay.
let inner = Group::new(
Delimiter::None,
quote!(static FOO: usize = 0; pub static BAR: usize = 0),
);
let tokens = quote!(pub #inner;);
let file = syn::parse2::<syn::File>(tokens).unwrap();
println!("{:#?}", file);
// Formerly parser crash.
let inner = Group::new(
Delimiter::None,
quote!(static FOO: usize; pub static BAR: usize),
);
let tokens = quote!(pub #inner;);
let file = syn::parse2::<syn::File>(tokens).unwrap();
println!("{:#?}", file);
}
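(Editor's side note, not part of the vendored test.) The Item::Verbatim claim in the comment above can be checked directly; a sketch:

#[test]
fn static_without_value_is_verbatim() {
    // Syntactically valid, semantically rejected by rustc, and surfaced by
    // syn as Item::Verbatim rather than Item::Static.
    let file = syn::parse2::<syn::File>(quote::quote! { pub static FOO: usize; }).unwrap();
    assert!(matches!(file.items[0], syn::Item::Verbatim(_)));
}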

630
vendor/syn/tests/repo/mod.rs vendored Normal file

@@ -0,0 +1,630 @@
#![allow(clippy::manual_assert)]
mod progress;
use self::progress::Progress;
use anyhow::Result;
use flate2::read::GzDecoder;
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use rayon::ThreadPoolBuilder;
use std::collections::BTreeSet;
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
use tar::Archive;
use walkdir::{DirEntry, WalkDir};
// nightly-2025-08-14
const REVISION: &str = "3672a55b7cfd0a12e7097197b6242872473ffaa7";
#[rustfmt::skip]
static EXCLUDE_FILES: &[&str] = &[
// TODO: const traits: `pub const trait Trait {}`
// https://github.com/dtolnay/syn/issues/1887
"src/tools/clippy/tests/ui/assign_ops.rs",
"src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.rs",
"src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs",
"src/tools/rust-analyzer/crates/test-utils/src/minicore.rs",
// TODO: unsafe binders: `unsafe<'a> &'a T`
// https://github.com/dtolnay/syn/issues/1791
"src/tools/rustfmt/tests/source/unsafe-binders.rs",
"src/tools/rustfmt/tests/target/unsafe-binders.rs",
"tests/mir-opt/gvn_on_unsafe_binder.rs",
"tests/rustdoc/auxiliary/unsafe-binder-dep.rs",
"tests/rustdoc/unsafe-binder.rs",
"tests/ui/unsafe-binders/cat-projection.rs",
// TODO: unsafe fields: `struct S { unsafe field: T }`
// https://github.com/dtolnay/syn/issues/1792
"src/tools/clippy/tests/ui/derive.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_list.rs",
"src/tools/rustfmt/tests/source/unsafe-field.rs",
"src/tools/rustfmt/tests/target/unsafe-field.rs",
"tests/ui/unsafe-fields/auxiliary/unsafe-fields-crate-dep.rs",
// TODO: guard patterns: `match expr { (A if f()) | (B if g()) => {} }`
// https://github.com/dtolnay/syn/issues/1793
"src/tools/rustfmt/tests/target/guard_patterns.rs",
"tests/ui/pattern/rfc-3637-guard-patterns/only-gather-locals-once.rs",
// TODO: struct field default: `struct S { field: i32 = 1 }`
// https://github.com/dtolnay/syn/issues/1774
"compiler/rustc_errors/src/markdown/parse.rs",
"compiler/rustc_session/src/config.rs",
"src/tools/clippy/tests/ui/exhaustive_items.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/record_field_default_values.rs",
"src/tools/rustfmt/tests/source/default-field-values.rs",
"src/tools/rustfmt/tests/target/default-field-values.rs",
"tests/ui/structs/default-field-values/auxiliary/struct_field_default.rs",
"tests/ui/structs/default-field-values/const-trait-default-field-value.rs",
"tests/ui/structs/default-field-values/field-references-param.rs",
"tests/ui/structs/default-field-values/support.rs",
"tests/ui/structs/default-field-values/use-normalized-ty-for-default-struct-value.rs",
// TODO: return type notation: `where T: Trait<method(): Send>` and `where T::method(..): Send`
// https://github.com/dtolnay/syn/issues/1434
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs",
"src/tools/rustfmt/tests/target/return-type-notation.rs",
"tests/rustdoc-json/return-type-notation.rs",
"tests/rustdoc/return-type-notation.rs",
"tests/ui/associated-type-bounds/all-generics-lookup.rs",
"tests/ui/associated-type-bounds/implied-from-self-where-clause.rs",
"tests/ui/associated-type-bounds/return-type-notation/basic.rs",
"tests/ui/associated-type-bounds/return-type-notation/higher-ranked-bound-works.rs",
"tests/ui/associated-type-bounds/return-type-notation/namespace-conflict.rs",
"tests/ui/associated-type-bounds/return-type-notation/path-constrained-in-method.rs",
"tests/ui/associated-type-bounds/return-type-notation/path-self-qself.rs",
"tests/ui/associated-type-bounds/return-type-notation/path-works.rs",
"tests/ui/associated-type-bounds/return-type-notation/unpretty-parenthesized.rs",
"tests/ui/async-await/return-type-notation/issue-110963-late.rs",
"tests/ui/async-await/return-type-notation/normalizing-self-auto-trait-issue-109924.rs",
"tests/ui/async-await/return-type-notation/rtn-implied-in-supertrait.rs",
"tests/ui/async-await/return-type-notation/super-method-bound.rs",
"tests/ui/async-await/return-type-notation/supertrait-bound.rs",
"tests/ui/borrowck/alias-liveness/rtn-static.rs",
"tests/ui/feature-gates/feature-gate-return_type_notation.rs",
// TODO: lazy type alias syntax with where-clause in trailing position
// https://github.com/dtolnay/syn/issues/1525
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/type_item_where_clause.rs",
"src/tools/rustfmt/tests/source/type-alias-where-clauses-with-comments.rs",
"src/tools/rustfmt/tests/source/type-alias-where-clauses.rs",
"src/tools/rustfmt/tests/target/type-alias-where-clauses-with-comments.rs",
"src/tools/rustfmt/tests/target/type-alias-where-clauses.rs",
"tests/rustdoc/typedef-inner-variants-lazy_type_alias.rs",
// TODO: gen blocks and functions
// https://github.com/dtolnay/syn/issues/1526
"compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs",
"compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs",
"compiler/rustc_metadata/src/rmeta/decoder.rs",
"compiler/rustc_middle/src/ty/closure.rs",
"compiler/rustc_middle/src/ty/context.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/gen_blocks.rs",
"tests/ui/async-await/async-drop/assign-incompatible-types.rs",
"tests/ui/coroutine/async-gen-deduce-yield.rs",
"tests/ui/coroutine/async-gen-yield-ty-is-unit.rs",
"tests/ui/coroutine/async_gen_fn_iter.rs",
"tests/ui/coroutine/gen_block_is_fused_iter.rs",
"tests/ui/coroutine/gen_block_is_iter.rs",
"tests/ui/coroutine/gen_block_iterate.rs",
"tests/ui/coroutine/gen_fn_iter.rs",
"tests/ui/coroutine/gen_fn_lifetime_capture.rs",
"tests/ui/coroutine/other-attribute-on-gen.rs",
"tests/ui/coroutine/return-types-diverge.rs",
"tests/ui/higher-ranked/builtin-closure-like-bounds.rs",
"tests/ui/sanitizer/cfi/coroutine.rs",
// TODO: postfix yield
// https://github.com/dtolnay/syn/issues/1890
"tests/pretty/postfix-yield.rs",
"tests/ui/coroutine/postfix-yield.rs",
// TODO: `!` as a pattern
// https://github.com/dtolnay/syn/issues/1546
"tests/mir-opt/building/match/never_patterns.rs",
"tests/pretty/never-pattern.rs",
"tests/ui/rfcs/rfc-0000-never_patterns/always-read-in-closure-capture.rs",
"tests/ui/rfcs/rfc-0000-never_patterns/diverges.rs",
"tests/ui/rfcs/rfc-0000-never_patterns/use-bindings.rs",
// TODO: async trait bounds: `impl async Fn()`
// https://github.com/dtolnay/syn/issues/1628
"src/tools/miri/tests/pass/async-closure-captures.rs",
"src/tools/miri/tests/pass/async-closure-drop.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/async_trait_bound.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs",
"src/tools/rustfmt/tests/target/asyncness.rs",
"tests/coverage/async_closure.rs",
"tests/ui/async-await/async-closures/async-fn-mut-for-async-fn.rs",
"tests/ui/async-await/async-closures/async-fn-once-for-async-fn.rs",
"tests/ui/async-await/async-closures/auxiliary/foreign.rs",
"tests/ui/async-await/async-closures/body-check-on-non-fnmut.rs",
"tests/ui/async-await/async-closures/box-deref-in-debuginfo.rs",
"tests/ui/async-await/async-closures/brand.rs",
"tests/ui/async-await/async-closures/captures.rs",
"tests/ui/async-await/async-closures/clone-closure.rs",
"tests/ui/async-await/async-closures/constrained-but-no-upvars-yet.rs",
"tests/ui/async-await/async-closures/debuginfo-by-move-body.rs",
"tests/ui/async-await/async-closures/drop.rs",
"tests/ui/async-await/async-closures/force-move-due-to-inferred-kind.rs",
"tests/ui/async-await/async-closures/foreign.rs",
"tests/ui/async-await/async-closures/inline-body.rs",
"tests/ui/async-await/async-closures/mangle.rs",
"tests/ui/async-await/async-closures/moro-example.rs",
"tests/ui/async-await/async-closures/move-is-async-fn.rs",
"tests/ui/async-await/async-closures/mut-ref-reborrow.rs",
"tests/ui/async-await/async-closures/no-borrow-from-env.rs",
"tests/ui/async-await/async-closures/non-copy-arg-does-not-force-inner-move.rs",
"tests/ui/async-await/async-closures/overlapping-projs.rs",
"tests/ui/async-await/async-closures/precise-captures.rs",
"tests/ui/async-await/async-closures/refd.rs",
"tests/ui/async-await/async-closures/signature-deduction.rs",
"tests/ui/async-await/async-fn/edition-2015-not-async-bound.rs",
"tests/ui/async-await/async-fn/higher-ranked-async-fn.rs",
"tests/ui/async-await/async-fn/impl-trait.rs",
"tests/ui/async-await/async-fn/project.rs",
"tests/ui/async-await/async-fn/sugar.rs",
// TODO: mutable by-reference bindings (mut ref)
// https://github.com/dtolnay/syn/issues/1629
"src/tools/rustfmt/tests/source/mut_ref.rs",
"src/tools/rustfmt/tests/target/mut_ref.rs",
"tests/ui/mut/mut-ref.rs",
// TODO: postfix match
// https://github.com/dtolnay/syn/issues/1630
"src/tools/clippy/tests/ui/unnecessary_semicolon.rs",
"src/tools/rustfmt/tests/source/postfix-match/pf-match.rs",
"src/tools/rustfmt/tests/target/postfix-match/pf-match.rs",
"tests/pretty/postfix-match/simple-matches.rs",
"tests/ui/match/postfix-match/no-unused-parens.rs",
"tests/ui/match/postfix-match/pf-match-chain.rs",
"tests/ui/match/postfix-match/postfix-match.rs",
// TODO: delegation: `reuse Trait::bar { Box::new(self.0) }`
// https://github.com/dtolnay/syn/issues/1580
"tests/pretty/delegation.rs",
"tests/pretty/hir-delegation.rs",
"tests/ui/delegation/body-identity-glob.rs",
"tests/ui/delegation/body-identity-list.rs",
"tests/ui/delegation/explicit-paths-in-traits-pass.rs",
"tests/ui/delegation/explicit-paths-pass.rs",
"tests/ui/delegation/explicit-paths-signature-pass.rs",
"tests/ui/delegation/fn-header.rs",
"tests/ui/delegation/generics/free-fn-to-free-fn-pass.rs",
"tests/ui/delegation/generics/free-fn-to-trait-method-pass.rs",
"tests/ui/delegation/generics/impl-to-free-fn-pass.rs",
"tests/ui/delegation/generics/impl-trait-to-trait-method-pass.rs",
"tests/ui/delegation/generics/inherent-impl-to-trait-method-pass.rs",
"tests/ui/delegation/generics/trait-method-to-other-pass.rs",
"tests/ui/delegation/glob-glob.rs",
"tests/ui/delegation/glob-override.rs",
"tests/ui/delegation/glob.rs",
"tests/ui/delegation/impl-trait.rs",
"tests/ui/delegation/list.rs",
"tests/ui/delegation/macro-inside-glob.rs",
"tests/ui/delegation/macro-inside-list.rs",
"tests/ui/delegation/method-call-priority.rs",
"tests/ui/delegation/parse.rs",
"tests/ui/delegation/rename.rs",
"tests/ui/delegation/self-coercion.rs",
// TODO: for await
// https://github.com/dtolnay/syn/issues/1631
"tests/ui/async-await/for-await-2015.rs",
"tests/ui/async-await/for-await-passthrough.rs",
"tests/ui/async-await/for-await.rs",
// TODO: unparenthesized half-open range pattern inside slice pattern: `[1..]`
// https://github.com/dtolnay/syn/issues/1769
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/range_pat.rs",
"tests/ui/consts/miri_unleashed/const_refers_to_static_cross_crate.rs",
// TODO: pinned type sugar: `&pin const Self`
// https://github.com/dtolnay/syn/issues/1770
"src/tools/rustfmt/tests/source/pin_sugar.rs",
"src/tools/rustfmt/tests/target/pin_sugar.rs",
"tests/pretty/pin-ergonomics-hir.rs",
"tests/pretty/pin-ergonomics.rs",
"tests/ui/pin-ergonomics/borrow.rs",
"tests/ui/pin-ergonomics/sugar-self.rs",
"tests/ui/pin-ergonomics/sugar.rs",
// TODO: attributes on where-predicates
// https://github.com/dtolnay/syn/issues/1705
"src/tools/rustfmt/tests/target/cfg_attribute_in_where.rs",
// TODO: super let
// https://github.com/dtolnay/syn/issues/1889
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/let_stmt.rs",
// TODO: "ergonomic clones": `f(obj.use)`, `thread::spawn(use || f(obj))`, `async use`
// https://github.com/dtolnay/syn/issues/1802
"tests/codegen-llvm/ergonomic-clones/closure.rs",
"tests/mir-opt/ergonomic-clones/closure.rs",
"tests/ui/ergonomic-clones/async/basic.rs",
"tests/ui/ergonomic-clones/closure/basic.rs",
"tests/ui/ergonomic-clones/closure/const-closure.rs",
"tests/ui/ergonomic-clones/closure/mutation.rs",
"tests/ui/ergonomic-clones/closure/nested.rs",
"tests/ui/ergonomic-clones/closure/once-move-out-on-heap.rs",
"tests/ui/ergonomic-clones/closure/with-binders.rs",
"tests/ui/ergonomic-clones/dotuse/basic.rs",
"tests/ui/ergonomic-clones/dotuse/block.rs",
// TODO: contracts
// https://github.com/dtolnay/syn/issues/1892
"tests/ui/contracts/internal_machinery/contract-ast-extensions-nest.rs",
"tests/ui/contracts/internal_machinery/contract-ast-extensions-tail.rs",
"tests/ui/contracts/internal_machinery/contracts-lowering-ensures-is-not-inherited-when-nesting.rs",
"tests/ui/contracts/internal_machinery/contracts-lowering-requires-is-not-inherited-when-nesting.rs",
// TODO: frontmatter
// https://github.com/dtolnay/syn/issues/1893
"tests/ui/frontmatter/auxiliary/lib.rs",
"tests/ui/frontmatter/dot-in-infostring-non-leading.rs",
"tests/ui/frontmatter/escape.rs",
"tests/ui/frontmatter/frontmatter-inner-hyphens-1.rs",
"tests/ui/frontmatter/frontmatter-inner-hyphens-2.rs",
"tests/ui/frontmatter/frontmatter-non-lexible-tokens.rs",
"tests/ui/frontmatter/frontmatter-whitespace-3.rs",
"tests/ui/frontmatter/frontmatter-whitespace-4.rs",
"tests/ui/frontmatter/shebang.rs",
"tests/ui/unpretty/frontmatter.rs",
// TODO: `|| .. .method()`
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_range_method_call.rs",
"src/tools/rustfmt/tests/source/issue-4808.rs",
// Negative inherent impl: `impl !Box<JoinHandle> {}`
"src/tools/rustfmt/tests/source/negative-impl.rs",
"src/tools/rustfmt/tests/target/negative-impl.rs",
// Compile-fail expr parameter in const generic position: `f::<1 + 2>()`
"tests/ui/const-generics/early/closing-args-token.rs",
"tests/ui/const-generics/early/const-expression-parameter.rs",
// Compile-fail variadics not in the last position of a function parameter list
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/fn_def_param.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/param_list_vararg.rs",
"tests/ui/parser/variadic-ffi-syntactic-pass.rs",
// Need at least one trait in impl Trait, no such type as impl 'static
"tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs",
// Negative polarity trait bound: `where T: !Copy`
"src/tools/rustfmt/tests/target/negative-bounds.rs",
"tests/ui/traits/negative-bounds/supertrait.rs",
// Const impl that is not a trait impl: `impl ~const T {}`
"tests/ui/traits/const-traits/syntax.rs",
// Lifetimes and types out of order in angle bracketed path arguments
"tests/ui/parser/constraints-before-generic-args-syntactic-pass.rs",
// Deprecated anonymous parameter syntax in traits
"src/tools/rustfmt/tests/source/trait.rs",
"src/tools/rustfmt/tests/target/trait.rs",
"tests/pretty/hir-fn-params.rs",
"tests/rustdoc/anon-fn-params.rs",
"tests/rustdoc/auxiliary/ext-anon-fn-params.rs",
"tests/ui/fn/anonymous-parameters-trait-13105.rs",
"tests/ui/issues/issue-34074.rs",
"tests/ui/proc-macro/trait-fn-args-2015.rs",
"tests/ui/trait-bounds/anonymous-parameters-13775.rs",
// Deprecated where-clause location
"src/tools/rustfmt/tests/source/issue_4257.rs",
"src/tools/rustfmt/tests/source/issue_4911.rs",
"src/tools/rustfmt/tests/target/issue_4257.rs",
"src/tools/rustfmt/tests/target/issue_4911.rs",
"tests/pretty/gat-bounds.rs",
"tests/rustdoc/generic-associated-types/gats.rs",
// Deprecated trait object syntax with parenthesized generic arguments and no dyn keyword
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/path_fn_trait_args.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/typepathfn_with_coloncolon.rs",
"src/tools/rustfmt/tests/source/attrib.rs",
"src/tools/rustfmt/tests/source/closure.rs",
"src/tools/rustfmt/tests/source/existential_type.rs",
"src/tools/rustfmt/tests/source/fn-simple.rs",
"src/tools/rustfmt/tests/source/fn_args_layout-vertical.rs",
"src/tools/rustfmt/tests/source/issue-4689/one.rs",
"src/tools/rustfmt/tests/source/issue-4689/two.rs",
"src/tools/rustfmt/tests/source/paths.rs",
"src/tools/rustfmt/tests/source/structs.rs",
"src/tools/rustfmt/tests/target/attrib.rs",
"src/tools/rustfmt/tests/target/closure.rs",
"src/tools/rustfmt/tests/target/existential_type.rs",
"src/tools/rustfmt/tests/target/fn-simple.rs",
"src/tools/rustfmt/tests/target/fn.rs",
"src/tools/rustfmt/tests/target/fn_args_layout-vertical.rs",
"src/tools/rustfmt/tests/target/issue-4689/one.rs",
"src/tools/rustfmt/tests/target/issue-4689/two.rs",
"src/tools/rustfmt/tests/target/paths.rs",
"src/tools/rustfmt/tests/target/structs.rs",
"tests/codegen-units/item-collection/non-generic-closures.rs",
"tests/debuginfo/recursive-enum.rs",
"tests/pretty/closure-reform-pretty.rs",
"tests/run-make/reproducible-build-2/reproducible-build.rs",
"tests/run-make/reproducible-build/reproducible-build.rs",
"tests/ui/impl-trait/generic-with-implicit-hrtb-without-dyn.rs",
"tests/ui/lifetimes/auxiliary/lifetime_bound_will_change_warning_lib.rs",
"tests/ui/lifetimes/bare-trait-object-borrowck.rs",
"tests/ui/lifetimes/bare-trait-object.rs",
"tests/ui/parser/bounds-obj-parens.rs",
// Various extensions to Rust syntax made up by rust-analyzer
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/assoc_type_bound.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/const_param_default_path.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/field_expr.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/generic_arg_bounds.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/ref_expr.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/use_tree_abs_star.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0015_use_tree.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0029_range_forms.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0051_parameter_attrs.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0055_dot_dot_dot.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0068_item_modifiers.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0038_endless_inclusive_range.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0045_ambiguous_trait_object.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0046_mutable_const_item.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0224_dangling_dyn.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/dangling_impl_reference.rs",
"src/tools/rust-analyzer/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs",
// Placeholder syntax for "throw expressions"
"compiler/rustc_errors/src/translation.rs",
"compiler/rustc_expand/src/module.rs",
"compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs",
"src/tools/clippy/tests/ui/needless_return.rs",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/yeet_expr.rs",
"tests/pretty/yeet-expr.rs",
"tests/ui/try-trait/yeet-for-option.rs",
"tests/ui/try-trait/yeet-for-result.rs",
// Edition 2015 code using identifiers that are now keywords
// TODO: some of these we should probably parse
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rs",
"src/tools/rustfmt/tests/source/configs/indent_style/block_call.rs",
"src/tools/rustfmt/tests/source/configs/use_try_shorthand/false.rs",
"src/tools/rustfmt/tests/source/configs/use_try_shorthand/true.rs",
"src/tools/rustfmt/tests/source/issue_1306.rs",
"src/tools/rustfmt/tests/source/try-conversion.rs",
"src/tools/rustfmt/tests/target/configs/indent_style/block_call.rs",
"src/tools/rustfmt/tests/target/configs/use_try_shorthand/false.rs",
"src/tools/rustfmt/tests/target/issue-1681.rs",
"src/tools/rustfmt/tests/target/issue_1306.rs",
"tests/ui/dyn-keyword/dyn-2015-no-warnings-without-lints.rs",
"tests/ui/editions/edition-keywords-2015-2015.rs",
"tests/ui/editions/edition-keywords-2015-2018.rs",
"tests/ui/lint/keyword-idents/auxiliary/multi_file_submod.rs",
"tests/ui/lint/lint_pre_expansion_extern_module_aux.rs",
"tests/ui/macros/macro-comma-support-rpass.rs",
"tests/ui/macros/try-macro.rs",
"tests/ui/parser/extern-crate-async.rs",
"tests/ui/try-block/try-is-identifier-edition2015.rs",
// Excessive nesting
"tests/ui/issues/issue-74564-if-expr-stack-overflow.rs",
// Testing tools on invalid syntax
"src/tools/clippy/tests/ui/non_expressive_names_error_recovery.rs",
"src/tools/rustfmt/tests/coverage/target/comments.rs",
"src/tools/rustfmt/tests/parser/issue-4126/invalid.rs",
"src/tools/rustfmt/tests/parser/issue_4418.rs",
"src/tools/rustfmt/tests/parser/stashed-diag.rs",
"src/tools/rustfmt/tests/parser/stashed-diag2.rs",
"src/tools/rustfmt/tests/parser/unclosed-delims/issue_4466.rs",
"src/tools/rustfmt/tests/source/configs/disable_all_formatting/true.rs",
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/false.rs",
"src/tools/rustfmt/tests/source/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/source/type.rs",
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/false.rs",
"src/tools/rustfmt/tests/target/configs/spaces_around_ranges/true.rs",
"src/tools/rustfmt/tests/target/type.rs",
"src/tools/rustfmt/tests/target/unsafe_extern_blocks.rs",
"tests/run-make/translation/test.rs",
"tests/ui/generics/issue-94432-garbage-ice.rs",
// Generated file containing a top-level expression, used with `include!`
"compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
// Not actually test cases
"tests/ui/lint/expansion-time-include.rs",
"tests/ui/macros/auxiliary/macro-comma-support.rs",
"tests/ui/macros/auxiliary/macro-include-items-expr.rs",
"tests/ui/macros/include-single-expr-helper.rs",
"tests/ui/macros/include-single-expr-helper-1.rs",
"tests/ui/parser/issues/auxiliary/issue-21146-inc.rs",
];
#[rustfmt::skip]
static EXCLUDE_DIRS: &[&str] = &[
// Inputs that intentionally do not parse
"src/tools/rust-analyzer/crates/parser/test_data/parser/err",
"src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err",
// Inputs that lex but do not necessarily parse
"src/tools/rust-analyzer/crates/parser/test_data/lexer",
// Inputs that used to crash rust-analyzer, but aren't necessarily supposed to parse
"src/tools/rust-analyzer/crates/syntax/test_data/parser/fuzz-failures",
"src/tools/rust-analyzer/crates/syntax/test_data/reparse/fuzz-failures",
// Inputs that crash rustc, making no claim about whether they are valid Rust
"tests/crashes",
];
// Directories in which a .stderr implies the corresponding .rs is not expected
// to work.
static UI_TEST_DIRS: &[&str] = &["tests/ui", "tests/rustdoc-ui"];
pub fn for_each_rust_file(for_each: impl Fn(&Path) + Sync + Send) {
let mut rs_files = BTreeSet::new();
let repo_dir = Path::new("tests/rust");
for entry in WalkDir::new(repo_dir)
.into_iter()
.filter_entry(base_dir_filter)
{
let entry = entry.unwrap();
if !entry.file_type().is_dir() {
rs_files.insert(entry.into_path());
}
}
for ui_test_dir in UI_TEST_DIRS {
for entry in WalkDir::new(repo_dir.join(ui_test_dir)) {
let mut path = entry.unwrap().into_path();
if path.extension() == Some(OsStr::new("stderr")) {
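// Strip the extension repeatedly so that a revisioned diagnostic file such
// as `name.revision.stderr` also knocks out the corresponding `name.rs`.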
loop {
rs_files.remove(&path.with_extension("rs"));
path = path.with_extension("");
if path.extension().is_none() {
break;
}
}
}
}
}
rs_files.par_iter().map(PathBuf::as_path).for_each(for_each);
}
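// WalkDir filter: keep the repo root, directories not listed in EXCLUDE_DIRS,
// and `.rs` files not listed in EXCLUDE_FILES (paths are normalized to forward
// slashes on Windows before comparison).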
pub fn base_dir_filter(entry: &DirEntry) -> bool {
let path = entry.path();
let mut path_string = path.to_string_lossy();
if cfg!(windows) {
path_string = path_string.replace('\\', "/").into();
}
let path_string = if path_string == "tests/rust" {
return true;
} else if let Some(path) = path_string.strip_prefix("tests/rust/") {
path
} else {
panic!("unexpected path in Rust dist: {}", path_string);
};
if path.is_dir() {
return !EXCLUDE_DIRS.contains(&path_string);
}
if path.extension() != Some(OsStr::new("rs")) {
return false;
}
!EXCLUDE_FILES.contains(&path_string)
}
#[allow(dead_code)]
pub fn edition(path: &Path) -> &'static str {
if path.ends_with("dyn-2015-no-warnings-without-lints.rs") {
"2015"
} else {
"2021"
}
}
#[allow(dead_code)]
pub fn abort_after() -> usize {
match env::var("ABORT_AFTER_FAILURE") {
Ok(s) => s.parse().expect("failed to parse ABORT_AFTER_FAILURE"),
Err(_) => usize::MAX,
}
}
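// Worker threads get a 40 MiB stack in debug builds and 20 MiB otherwise,
// unless RUST_MIN_STACK overrides it; parsing the full corpus can recurse deeply.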
pub fn rayon_init() {
let stack_size = match env::var("RUST_MIN_STACK") {
Ok(s) => s.parse().expect("failed to parse RUST_MIN_STACK"),
Err(_) => 1024 * 1024 * if cfg!(debug_assertions) { 40 } else { 20 },
};
ThreadPoolBuilder::new()
.stack_size(stack_size)
.build_global()
.unwrap();
}
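// Re-download the rust-lang/rust sources whenever tests/rust/COMMIT does not
// match the pinned REVISION, then sanity-check that EXCLUDE_FILES and
// EXCLUDE_DIRS contain no duplicates, no entries shadowed by an excluded dir,
// and only paths that actually exist in the checkout.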
pub fn clone_rust() {
let needs_clone = match fs::read_to_string("tests/rust/COMMIT") {
Err(_) => true,
Ok(contents) => contents.trim() != REVISION,
};
if needs_clone {
download_and_unpack().unwrap();
}
let mut missing = String::new();
let test_src = Path::new("tests/rust");
let mut exclude_files_set = BTreeSet::new();
for exclude in EXCLUDE_FILES {
if !exclude_files_set.insert(exclude) {
panic!("duplicate path in EXCLUDE_FILES: {}", exclude);
}
for dir in EXCLUDE_DIRS {
if Path::new(exclude).starts_with(dir) {
panic!("excluded file {} is inside an excluded dir", exclude);
}
}
if !test_src.join(exclude).is_file() {
missing += "\ntests/rust/";
missing += exclude;
}
}
let mut exclude_dirs_set = BTreeSet::new();
for exclude in EXCLUDE_DIRS {
if !exclude_dirs_set.insert(exclude) {
panic!("duplicate path in EXCLUDE_DIRS: {}", exclude);
}
if !test_src.join(exclude).is_dir() {
missing += "\ntests/rust/";
missing += exclude;
missing += "/";
}
}
if !missing.is_empty() {
panic!("excluded test file does not exist:{}\n", missing);
}
}
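// Stream the GitHub source tarball for REVISION through Progress -> GzDecoder
// -> tar::Archive, dropping the top-level `rust-{REVISION}/` prefix so the
// tree is unpacked directly into tests/rust/.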
fn download_and_unpack() -> Result<()> {
let url = format!("https://github.com/rust-lang/rust/archive/{REVISION}.tar.gz");
errorf!("downloading {url}\n");
let response = reqwest::blocking::get(url)?.error_for_status()?;
let progress = Progress::new(response);
let decoder = GzDecoder::new(progress);
let mut archive = Archive::new(decoder);
let prefix = format!("rust-{}", REVISION);
let tests_rust = Path::new("tests/rust");
if tests_rust.exists() {
fs::remove_dir_all(tests_rust)?;
}
for entry in archive.entries()? {
let mut entry = entry?;
let path = entry.path()?;
if path == Path::new("pax_global_header") {
continue;
}
let relative = path.strip_prefix(&prefix)?;
let out = tests_rust.join(relative);
entry.unpack(&out)?;
}
fs::write("tests/rust/COMMIT", REVISION)?;
Ok(())
}

37
vendor/syn/tests/repo/progress.rs vendored Normal file
View File

@@ -0,0 +1,37 @@
use std::io::{Read, Result};
use std::time::{Duration, Instant};
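// Read adapter that counts the bytes flowing through it and reports progress
// on stderr: the first report is delayed by two seconds, later ones are
// throttled to at most one every 500 ms, and Drop prints the final total.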
pub struct Progress<R> {
bytes: usize,
tick: Instant,
stream: R,
}
impl<R> Progress<R> {
pub fn new(stream: R) -> Self {
Progress {
bytes: 0,
tick: Instant::now() + Duration::from_millis(2000),
stream,
}
}
}
impl<R: Read> Read for Progress<R> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let num = self.stream.read(buf)?;
self.bytes += num;
let now = Instant::now();
if now > self.tick {
self.tick = now + Duration::from_millis(500);
errorf!("downloading... {} bytes\n", self.bytes);
}
Ok(num)
}
}
impl<R> Drop for Progress<R> {
fn drop(&mut self) {
errorf!("done ({} bytes)\n", self.bytes);
}
}

68
vendor/syn/tests/snapshot/mod.rs vendored Normal file
View File

@@ -0,0 +1,68 @@
#![allow(unused_macros, unused_macro_rules)]
use std::str::FromStr;
use syn::parse::Result;
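// `snapshot!(tokens as Type, @r#"..."#)` parses the tokens as the given syn
// type and asserts its `Lite` debug formatting against the inline insta
// snapshot (skipped under Miri). `snapshot_impl!` is a tt-muncher that
// accumulates the expression until it reaches `as $t` or the `@` snapshot literal.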
macro_rules! snapshot {
($($args:tt)*) => {
snapshot_impl!(() $($args)*)
};
}
macro_rules! snapshot_impl {
(($expr:ident) as $t:ty, @$snapshot:literal) => {
let tokens = crate::snapshot::TryIntoTokens::try_into_tokens($expr).unwrap();
let $expr: $t = syn::parse_quote!(#tokens);
let debug = crate::debug::Lite(&$expr);
if !cfg!(miri) {
#[allow(clippy::needless_raw_string_hashes)] // https://github.com/mitsuhiko/insta/issues/389
{
insta::assert_debug_snapshot!(debug, @$snapshot);
}
}
};
(($($expr:tt)*) as $t:ty, @$snapshot:literal) => {{
let tokens = crate::snapshot::TryIntoTokens::try_into_tokens($($expr)*).unwrap();
let syntax_tree: $t = syn::parse_quote!(#tokens);
let debug = crate::debug::Lite(&syntax_tree);
if !cfg!(miri) {
#[allow(clippy::needless_raw_string_hashes)]
{
insta::assert_debug_snapshot!(debug, @$snapshot);
}
}
syntax_tree
}};
(($($expr:tt)*) , @$snapshot:literal) => {{
let syntax_tree = $($expr)*;
let debug = crate::debug::Lite(&syntax_tree);
if !cfg!(miri) {
#[allow(clippy::needless_raw_string_hashes)]
{
insta::assert_debug_snapshot!(debug, @$snapshot);
}
}
syntax_tree
}};
(($($expr:tt)*) $next:tt $($rest:tt)*) => {
snapshot_impl!(($($expr)* $next) $($rest)*)
};
}
pub trait TryIntoTokens {
#[allow(dead_code)]
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream>;
}
impl TryIntoTokens for &str {
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
let tokens = proc_macro2::TokenStream::from_str(self)?;
Ok(tokens)
}
}
impl TryIntoTokens for proc_macro2::TokenStream {
fn try_into_tokens(self) -> Result<proc_macro2::TokenStream> {
Ok(self)
}
}

49
vendor/syn/tests/test_asyncness.rs vendored Normal file
View File

@@ -0,0 +1,49 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use syn::{Expr, Item};
#[test]
fn test_async_fn() {
let input = "async fn process() {}";
snapshot!(input as Item, @r#"
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
asyncness: Some,
ident: "process",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
}
"#);
}
#[test]
fn test_async_closure() {
let input = "async || {}";
snapshot!(input as Expr, @r#"
Expr::Closure {
asyncness: Some,
output: ReturnType::Default,
body: Expr::Block {
block: Block {
stmts: [],
},
},
}
"#);
}

231
vendor/syn/tests/test_attribute.rs vendored Normal file
View File

@@ -0,0 +1,231 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use syn::parse::Parser;
use syn::{Attribute, Meta};
#[test]
fn test_meta_item_word() {
let meta = test("#[foo]");
snapshot!(meta, @r#"
Meta::Path {
segments: [
PathSegment {
ident: "foo",
},
],
}
"#);
}
#[test]
fn test_meta_item_name_value() {
let meta = test("#[foo = 5]");
snapshot!(meta, @r#"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: 5,
},
}
"#);
}
#[test]
fn test_meta_item_bool_value() {
let meta = test("#[foo = true]");
snapshot!(meta, @r#"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: Lit::Bool {
value: true,
},
},
}
"#);
let meta = test("#[foo = false]");
snapshot!(meta, @r#"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: Lit::Bool {
value: false,
},
},
}
"#);
}
#[test]
fn test_meta_item_list_lit() {
let meta = test("#[foo(5)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`5`),
}
"#);
}
#[test]
fn test_meta_item_list_word() {
let meta = test("#[foo(bar)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`bar`),
}
"#);
}
#[test]
fn test_meta_item_list_name_value() {
let meta = test("#[foo(bar = 5)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`bar = 5`),
}
"#);
}
#[test]
fn test_meta_item_list_bool_value() {
let meta = test("#[foo(bar = true)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`bar = true`),
}
"#);
}
#[test]
fn test_meta_item_multiple() {
let meta = test("#[foo(word, name = 5, list(name2 = 6), word2)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
}
"#);
}
#[test]
fn test_bool_lit() {
let meta = test("#[foo(true)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`true`),
}
"#);
}
#[test]
fn test_negative_lit() {
let meta = test("#[form(min = -1, max = 200)]");
snapshot!(meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "form",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`min = - 1 , max = 200`),
}
"#);
}
fn test(input: &str) -> Meta {
let attrs = Attribute::parse_outer.parse_str(input).unwrap();
assert_eq!(attrs.len(), 1);
let attr = attrs.into_iter().next().unwrap();
attr.meta
}

785
vendor/syn/tests/test_derive_input.rs vendored Normal file
View File

@@ -0,0 +1,785 @@
#![allow(
clippy::assertions_on_result_states,
clippy::elidable_lifetime_names,
clippy::manual_let_else,
clippy::needless_lifetimes,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use quote::quote;
use syn::{Data, DeriveInput};
#[test]
fn test_unit() {
let input = quote! {
struct Unit;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "Unit",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
}
#[test]
fn test_struct() {
let input = quote! {
#[derive(Debug, Clone)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>
}
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::List {
path: Path {
segments: [
PathSegment {
ident: "derive",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`Debug , Clone`),
},
},
],
vis: Visibility::Public,
ident: "Item",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Public,
ident: Some("ident"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Ident",
},
],
},
},
},
Token![,],
Field {
vis: Visibility::Public,
ident: Some("attrs"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Attribute",
},
],
},
}),
],
},
},
],
},
},
},
],
},
},
}
"#);
snapshot!(&input.attrs[0].meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "derive",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`Debug , Clone`),
}
"#);
}
#[test]
fn test_union() {
let input = quote! {
union MaybeUninit<T> {
uninit: (),
value: T
}
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "MaybeUninit",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "T",
}),
],
gt_token: Some,
},
data: Data::Union {
fields: FieldsNamed {
named: [
Field {
vis: Visibility::Inherited,
ident: Some("uninit"),
colon_token: Some,
ty: Type::Tuple,
},
Token![,],
Field {
vis: Visibility::Inherited,
ident: Some("value"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
},
],
},
},
}
"#);
}
#[test]
#[cfg(feature = "full")]
fn test_enum() {
let input = quote! {
/// See the std::result module documentation for details.
#[must_use]
pub enum Result<T, E> {
Ok(T),
Err(E),
Surprise = 0isize,
// Smuggling data into a proc_macro_derive,
// in the style of https://github.com/dtolnay/proc-macro-hack
ProcMacroHack = (0, "data").0
}
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "doc",
},
],
},
value: Expr::Lit {
lit: " See the std::result module documentation for details.",
},
},
},
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "must_use",
},
],
},
},
],
vis: Visibility::Public,
ident: "Result",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "T",
}),
Token![,],
GenericParam::Type(TypeParam {
ident: "E",
}),
],
gt_token: Some,
},
data: Data::Enum {
variants: [
Variant {
ident: "Ok",
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
},
],
},
},
Token![,],
Variant {
ident: "Err",
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "E",
},
],
},
},
},
],
},
},
Token![,],
Variant {
ident: "Surprise",
fields: Fields::Unit,
discriminant: Some(Expr::Lit {
lit: 0isize,
}),
},
Token![,],
Variant {
ident: "ProcMacroHack",
fields: Fields::Unit,
discriminant: Some(Expr::Field {
base: Expr::Tuple {
elems: [
Expr::Lit {
lit: 0,
},
Token![,],
Expr::Lit {
lit: "data",
},
],
},
member: Member::Unnamed(Index {
index: 0,
}),
}),
},
],
},
}
"#);
let meta_items: Vec<_> = input.attrs.into_iter().map(|attr| attr.meta).collect();
snapshot!(meta_items, @r#"
[
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "doc",
},
],
},
value: Expr::Lit {
lit: " See the std::result module documentation for details.",
},
},
Meta::Path {
segments: [
PathSegment {
ident: "must_use",
},
],
},
]
"#);
}
#[test]
fn test_attr_with_non_mod_style_path() {
let input = quote! {
#[inert <T>]
struct S;
};
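// Rejected: attribute paths are plain mod-style paths, so generic arguments
// like `<T>` are not allowed here.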
syn::parse2::<DeriveInput>(input).unwrap_err();
}
#[test]
fn test_attr_with_mod_style_path_with_self() {
let input = quote! {
#[foo::self]
struct S;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "foo",
},
Token![::],
PathSegment {
ident: "self",
},
],
},
},
],
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
snapshot!(&input.attrs[0].meta, @r#"
Meta::Path {
segments: [
PathSegment {
ident: "foo",
},
Token![::],
PathSegment {
ident: "self",
},
],
}
"#);
}
#[test]
fn test_pub_restricted() {
// Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
let input = quote! {
pub(in m) struct Z(pub(in m::n) u8);
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "m",
},
],
},
},
ident: "Z",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "m",
},
Token![::],
PathSegment {
ident: "n",
},
],
},
},
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
},
],
},
},
},
],
},
semi_token: Some,
},
}
"#);
}
#[test]
fn test_pub_restricted_crate() {
let input = quote! {
pub(crate) struct S;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Restricted {
path: Path {
segments: [
PathSegment {
ident: "crate",
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
}
#[test]
fn test_pub_restricted_super() {
let input = quote! {
pub(super) struct S;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Restricted {
path: Path {
segments: [
PathSegment {
ident: "super",
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
}
#[test]
fn test_pub_restricted_in_super() {
let input = quote! {
pub(in super) struct S;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Restricted {
in_token: Some,
path: Path {
segments: [
PathSegment {
ident: "super",
},
],
},
},
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
}
#[test]
fn test_fields_on_unit_struct() {
let input = quote! {
struct S;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
assert_eq!(0, data.fields.iter().count());
}
#[test]
fn test_fields_on_named_struct() {
let input = quote! {
struct S {
foo: i32,
pub bar: String,
}
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Inherited,
ident: Some("foo"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Token![,],
Field {
vis: Visibility::Public,
ident: Some("bar"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
Token![,],
],
},
},
}
"#);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
snapshot!(data.fields.into_iter().collect::<Vec<_>>(), @r#"
[
Field {
vis: Visibility::Inherited,
ident: Some("foo"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Field {
vis: Visibility::Public,
ident: Some("bar"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
]
"#);
}
#[test]
fn test_fields_on_tuple_struct() {
let input = quote! {
struct S(i32, pub String);
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Token![,],
Field {
vis: Visibility::Public,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
],
},
semi_token: Some,
},
}
"#);
let data = match input.data {
Data::Struct(data) => data,
_ => panic!("expected a struct"),
};
snapshot!(data.fields.iter().collect::<Vec<_>>(), @r#"
[
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
},
Field {
vis: Visibility::Public,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "String",
},
],
},
},
},
]
"#);
}
#[test]
fn test_ambiguous_crate() {
let input = quote! {
// The field type is `(crate::X)` not `crate (::X)`.
struct S(crate::X);
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "crate",
},
Token![::],
PathSegment {
ident: "X",
},
],
},
},
},
],
},
semi_token: Some,
},
}
"#);
}

1702
vendor/syn/tests/test_expr.rs vendored Normal file

File diff suppressed because it is too large

322
vendor/syn/tests/test_generics.rs vendored Normal file
View File

@@ -0,0 +1,322 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::manual_let_else,
clippy::needless_lifetimes,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use quote::quote;
use syn::{DeriveInput, ItemFn, TypeParamBound, WhereClause, WherePredicate};
#[test]
fn test_split_for_impl() {
let input = quote! {
struct S<'a, 'b: 'a, #[may_dangle] T: 'a = ()> where T: Debug;
};
snapshot!(input as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "a",
},
}),
Token![,],
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "b",
},
colon_token: Some,
bounds: [
Lifetime {
ident: "a",
},
],
}),
Token![,],
GenericParam::Type(TypeParam {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "may_dangle",
},
],
},
},
],
ident: "T",
colon_token: Some,
bounds: [
TypeParamBound::Lifetime {
ident: "a",
},
],
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
WherePredicate::Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Debug",
},
],
},
}),
],
}),
],
}),
},
data: Data::Struct {
fields: Fields::Unit,
semi_token: Some,
},
}
"#);
let generics = input.generics;
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
let generated = quote! {
impl #impl_generics MyTrait for Test #ty_generics #where_clause {}
};
let expected = quote! {
impl<'a, 'b: 'a, #[may_dangle] T: 'a> MyTrait
for Test<'a, 'b, T>
where
T: Debug
{}
};
assert_eq!(generated.to_string(), expected.to_string());
let turbofish = ty_generics.as_turbofish();
let generated = quote! {
Test #turbofish
};
let expected = quote! {
Test::<'a, 'b, T>
};
assert_eq!(generated.to_string(), expected.to_string());
}
#[test]
fn test_type_param_bound() {
let tokens = quote!('a);
snapshot!(tokens as TypeParamBound, @r#"
TypeParamBound::Lifetime {
ident: "a",
}
"#);
let tokens = quote!('_);
snapshot!(tokens as TypeParamBound, @r#"
TypeParamBound::Lifetime {
ident: "_",
}
"#);
let tokens = quote!(Debug);
snapshot!(tokens as TypeParamBound, @r#"
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Debug",
},
],
},
})
"#);
let tokens = quote!(?Sized);
snapshot!(tokens as TypeParamBound, @r#"
TypeParamBound::Trait(TraitBound {
modifier: TraitBoundModifier::Maybe,
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
})
"#);
let tokens = quote!(for<'a> Trait);
snapshot!(tokens as TypeParamBound, @r#"
TypeParamBound::Trait(TraitBound {
lifetimes: Some(BoundLifetimes {
lifetimes: [
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "a",
},
}),
],
}),
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
})
"#);
let tokens = quote!(for<> ?Trait);
let err = syn::parse2::<TypeParamBound>(tokens).unwrap_err();
assert_eq!(
"`for<...>` binder not allowed with `?` trait polarity modifier",
err.to_string(),
);
let tokens = quote!(?for<> Trait);
let err = syn::parse2::<TypeParamBound>(tokens).unwrap_err();
assert_eq!(
"`for<...>` binder not allowed with `?` trait polarity modifier",
err.to_string(),
);
}
#[test]
fn test_fn_precedence_in_where_clause() {
// This should parse as two separate bounds, `FnOnce() -> i32` and `Send` - not
// `FnOnce() -> (i32 + Send)`.
let input = quote! {
fn f<G>()
where
G: FnOnce() -> i32 + Send,
{
}
};
snapshot!(input as ItemFn, @r#"
ItemFn {
vis: Visibility::Inherited,
sig: Signature {
ident: "f",
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "G",
}),
],
gt_token: Some,
where_clause: Some(WhereClause {
predicates: [
WherePredicate::Type(PredicateType {
bounded_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "G",
},
],
},
},
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: ReturnType::Type(
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "i32",
},
],
},
},
),
},
},
],
},
}),
Token![+],
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Send",
},
],
},
}),
],
}),
Token![,],
],
}),
},
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
}
"#);
let where_clause = input.sig.generics.where_clause.as_ref().unwrap();
assert_eq!(where_clause.predicates.len(), 1);
let predicate = match &where_clause.predicates[0] {
WherePredicate::Type(pred) => pred,
_ => panic!("wrong predicate kind"),
};
assert_eq!(predicate.bounds.len(), 2, "{:#?}", predicate.bounds);
let first_bound = &predicate.bounds[0];
assert_eq!(quote!(#first_bound).to_string(), "FnOnce () -> i32");
let second_bound = &predicate.bounds[1];
assert_eq!(quote!(#second_bound).to_string(), "Send");
}
#[test]
fn test_where_clause_at_end_of_input() {
let input = quote! {
where
};
snapshot!(input as WhereClause, @"WhereClause");
assert_eq!(input.predicates.len(), 0);
}

59
vendor/syn/tests/test_grouping.rs vendored Normal file
View File

@@ -0,0 +1,59 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
use syn::Expr;
#[test]
fn test_grouping() {
let tokens: TokenStream = TokenStream::from_iter([
TokenTree::Literal(Literal::i32_suffixed(1)),
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter([
TokenTree::Literal(Literal::i32_suffixed(2)),
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Literal(Literal::i32_suffixed(3)),
]),
)),
TokenTree::Punct(Punct::new('*', Spacing::Alone)),
TokenTree::Literal(Literal::i32_suffixed(4)),
]);
assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32");
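// The None-delimited group binds `2i32 + 3i32` into a single operand, so even
// though the flat token string above prints with ordinary spacing, the parsed
// tree below is `1 + ((2 + 3) * 4)` rather than following textual precedence.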
snapshot!(tokens as Expr, @r#"
Expr::Binary {
left: Expr::Lit {
lit: 1i32,
},
op: BinOp::Add,
right: Expr::Binary {
left: Expr::Group {
expr: Expr::Binary {
left: Expr::Lit {
lit: 2i32,
},
op: BinOp::Add,
right: Expr::Lit {
lit: 3i32,
},
},
},
op: BinOp::Mul,
right: Expr::Lit {
lit: 4i32,
},
},
}
"#);
}

87
vendor/syn/tests/test_ident.rs vendored Normal file
View File

@@ -0,0 +1,87 @@
use proc_macro2::{Ident, Span, TokenStream};
use std::str::FromStr;
use syn::Result;
#[track_caller]
fn parse(s: &str) -> Result<Ident> {
syn::parse2(TokenStream::from_str(s).unwrap())
}
#[track_caller]
fn new(s: &str) -> Ident {
Ident::new(s, Span::call_site())
}
#[test]
fn ident_parse() {
parse("String").unwrap();
}
#[test]
fn ident_parse_keyword() {
parse("abstract").unwrap_err();
}
#[test]
fn ident_parse_empty() {
parse("").unwrap_err();
}
#[test]
fn ident_parse_lifetime() {
parse("'static").unwrap_err();
}
#[test]
fn ident_parse_underscore() {
parse("_").unwrap_err();
}
#[test]
fn ident_parse_number() {
parse("255").unwrap_err();
}
#[test]
fn ident_parse_invalid() {
parse("a#").unwrap_err();
}
#[test]
fn ident_new() {
new("String");
}
#[test]
fn ident_new_keyword() {
new("abstract");
}
#[test]
#[should_panic(expected = "use Option<Ident>")]
fn ident_new_empty() {
new("");
}
#[test]
#[should_panic(expected = "not a valid Ident")]
fn ident_new_lifetime() {
new("'static");
}
#[test]
fn ident_new_underscore() {
new("_");
}
#[test]
#[should_panic(expected = "use Literal instead")]
fn ident_new_number() {
new("255");
}
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_new_invalid() {
new("a#");
}

316
vendor/syn/tests/test_item.rs vendored Normal file
View File

@@ -0,0 +1,316 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::quote;
use syn::{Item, ItemTrait};
#[test]
fn test_macro_variable_attr() {
// mimics the token stream corresponding to `$attr fn f() {}`
let tokens = TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, quote! { #[test] })),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r#"
Item::Fn {
attrs: [
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "test",
},
],
},
},
],
vis: Visibility::Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
}
"#);
}
#[test]
fn test_negative_impl() {
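// `cfg(any())` is never true, so rustc only has to parse these items, not
// compile them; they document that the syntax itself is accepted upstream.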
#[cfg(any())]
impl ! {}
let tokens = quote! {
impl ! {}
};
snapshot!(tokens as Item, @r#"
Item::Impl {
generics: Generics,
self_ty: Type::Never,
}
"#);
let tokens = quote! {
impl !Trait {}
};
let err = syn::parse2::<Item>(tokens).unwrap_err();
assert_eq!(err.to_string(), "inherent impls cannot be negative");
#[cfg(any())]
impl !Trait for T {}
let tokens = quote! {
impl !Trait for T {}
};
snapshot!(tokens as Item, @r#"
Item::Impl {
generics: Generics,
trait_: Some((
Some,
Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
)),
self_ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
}
"#);
}
#[test]
fn test_macro_variable_impl() {
// mimics the token stream corresponding to `impl $trait for $ty {}`
let tokens = TokenStream::from_iter([
TokenTree::Ident(Ident::new("impl", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))),
TokenTree::Ident(Ident::new("for", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::None, quote!(Type))),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]);
snapshot!(tokens as Item, @r#"
Item::Impl {
generics: Generics,
trait_: Some((
None,
Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
)),
self_ty: Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Type",
},
],
},
},
},
}
"#);
}
#[test]
fn test_supertraits() {
// Rustc parses all of the following.
#[rustfmt::skip]
let tokens = quote!(trait Trait where {});
snapshot!(tokens as ItemTrait, @r#"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
}
"#);
#[rustfmt::skip]
let tokens = quote!(trait Trait: where {});
snapshot!(tokens as ItemTrait, @r#"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
}
"#);
#[rustfmt::skip]
let tokens = quote!(trait Trait: Sized where {});
snapshot!(tokens as ItemTrait, @r#"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
supertraits: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
],
}
"#);
#[rustfmt::skip]
let tokens = quote!(trait Trait: Sized + where {});
snapshot!(tokens as ItemTrait, @r#"
ItemTrait {
vis: Visibility::Inherited,
ident: "Trait",
generics: Generics {
where_clause: Some(WhereClause),
},
colon_token: Some,
supertraits: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
Token![+],
],
}
"#);
}
#[test]
fn test_type_empty_bounds() {
#[rustfmt::skip]
let tokens = quote! {
trait Foo {
type Bar: ;
}
};
snapshot!(tokens as ItemTrait, @r#"
ItemTrait {
vis: Visibility::Inherited,
ident: "Foo",
generics: Generics,
items: [
TraitItem::Type {
ident: "Bar",
generics: Generics,
colon_token: Some,
},
],
}
"#);
}
#[test]
fn test_impl_visibility() {
let tokens = quote! {
pub default unsafe impl union {}
};
snapshot!(tokens as Item, @"Item::Verbatim(`pub default unsafe impl union { }`)");
}
#[test]
fn test_impl_type_parameter_defaults() {
#[cfg(any())]
impl<T = ()> () {}
let tokens = quote! {
impl<T = ()> () {}
};
snapshot!(tokens as Item, @r#"
Item::Impl {
generics: Generics {
lt_token: Some,
params: [
GenericParam::Type(TypeParam {
ident: "T",
eq_token: Some,
default: Some(Type::Tuple),
}),
],
gt_token: Some,
},
self_ty: Type::Tuple,
}
"#);
}
#[test]
fn test_impl_trait_trailing_plus() {
let tokens = quote! {
fn f() -> impl Sized + {}
};
snapshot!(tokens as Item, @r#"
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "f",
generics: Generics,
output: ReturnType::Type(
Type::ImplTrait {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
Token![+],
],
},
),
},
block: Block {
stmts: [],
},
}
"#);
}

105
vendor/syn/tests/test_iterators.rs vendored Normal file
View File

@@ -0,0 +1,105 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::map_unwrap_or,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
use syn::punctuated::{Pair, Punctuated};
use syn::{parse_quote, GenericParam, Generics, Lifetime, LifetimeParam, Token};
macro_rules! punctuated {
($($e:expr,)+) => {{
let mut seq = ::syn::punctuated::Punctuated::new();
$(
seq.push($e);
)+
seq
}};
($($e:expr),+) => {
punctuated!($($e,)+)
};
}
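// Asserts that an ExactSizeIterator is self-consistent: `len()`, `count()`,
// and `size_hint()` must all agree.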
macro_rules! check_exact_size_iterator {
($iter:expr) => {{
let iter = $iter;
let size_hint = iter.size_hint();
let len = iter.len();
let count = iter.count();
assert_eq!(len, count);
assert_eq!(size_hint, (count, Some(count)));
}};
}
#[test]
fn pairs() {
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
check_exact_size_iterator!(p.pairs());
check_exact_size_iterator!(p.pairs_mut());
check_exact_size_iterator!(p.into_pairs());
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
assert_eq!(p.pairs().next_back().map(Pair::into_value), Some(&4));
assert_eq!(
p.pairs_mut().next_back().map(Pair::into_value),
Some(&mut 4)
);
assert_eq!(p.into_pairs().next_back().map(Pair::into_value), Some(4));
}
#[test]
fn iter() {
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
check_exact_size_iterator!(p.iter());
check_exact_size_iterator!(p.iter_mut());
check_exact_size_iterator!(p.into_iter());
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
assert_eq!(p.iter().next_back(), Some(&4));
assert_eq!(p.iter_mut().next_back(), Some(&mut 4));
assert_eq!(p.into_iter().next_back(), Some(4));
}
#[test]
fn may_dangle() {
let p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
for element in &p {
if *element == 2 {
drop(p);
break;
}
}
let mut p: Punctuated<_, Token![,]> = punctuated!(2, 3, 4);
for element in &mut p {
if *element == 2 {
drop(p);
break;
}
}
}
// Regression test for https://github.com/dtolnay/syn/issues/1718
#[test]
fn no_opaque_drop() {
let mut generics = Generics::default();
let _ = generics
.lifetimes()
.next()
.map(|param| param.lifetime.clone())
.unwrap_or_else(|| {
let lifetime: Lifetime = parse_quote!('a);
generics.params.insert(
0,
GenericParam::Lifetime(LifetimeParam::new(lifetime.clone())),
);
lifetime
});
}

335
vendor/syn/tests/test_lit.rs vendored Normal file
View File

@@ -0,0 +1,335 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::float_cmp,
clippy::needless_lifetimes,
clippy::needless_raw_string_hashes,
clippy::non_ascii_literal,
clippy::single_match_else,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::ffi::CStr;
use std::str::FromStr;
use syn::{Lit, LitFloat, LitInt, LitStr};
#[track_caller]
fn lit(s: &str) -> Lit {
let mut tokens = TokenStream::from_str(s).unwrap().into_iter();
match tokens.next().unwrap() {
TokenTree::Literal(lit) => {
assert!(tokens.next().is_none());
Lit::new(lit)
}
wrong => panic!("{:?}", wrong),
}
}
#[test]
fn strings() {
#[track_caller]
fn test_string(s: &str, value: &str) {
let s = s.trim();
match lit(s) {
Lit::Str(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_string(r#" "" "#, "");
test_string(r#" "a" "#, "a");
test_string(r#" "\n" "#, "\n");
test_string(r#" "\r" "#, "\r");
test_string(r#" "\t" "#, "\t");
test_string(r#" "🐕" "#, "🐕"); // NOTE: This is an emoji
test_string(r#" "\"" "#, "\"");
test_string(r#" "'" "#, "'");
test_string(r#" "\u{1F415}" "#, "\u{1F415}");
test_string(r#" "\u{1_2__3_}" "#, "\u{123}");
test_string(
"\"contains\nnewlines\\\nescaped newlines\"",
"contains\nnewlinesescaped newlines",
);
test_string(
"\"escaped newline\\\n \x0C unsupported whitespace\"",
"escaped newline\x0C unsupported whitespace",
);
test_string("r\"raw\nstring\\\nhere\"", "raw\nstring\\\nhere");
test_string("\"...\"q", "...");
test_string("r\"...\"q", "...");
test_string("r##\"...\"##q", "...");
}
#[test]
fn byte_strings() {
#[track_caller]
fn test_byte_string(s: &str, value: &[u8]) {
let s = s.trim();
match lit(s) {
Lit::ByteStr(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_byte_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_byte_string(r#" b"" "#, b"");
test_byte_string(r#" b"a" "#, b"a");
test_byte_string(r#" b"\n" "#, b"\n");
test_byte_string(r#" b"\r" "#, b"\r");
test_byte_string(r#" b"\t" "#, b"\t");
test_byte_string(r#" b"\"" "#, b"\"");
test_byte_string(r#" b"'" "#, b"'");
test_byte_string(
"b\"contains\nnewlines\\\nescaped newlines\"",
b"contains\nnewlinesescaped newlines",
);
test_byte_string("br\"raw\nstring\\\nhere\"", b"raw\nstring\\\nhere");
test_byte_string("b\"...\"q", b"...");
test_byte_string("br\"...\"q", b"...");
test_byte_string("br##\"...\"##q", b"...");
}
#[test]
fn c_strings() {
#[track_caller]
fn test_c_string(s: &str, value: &CStr) {
let s = s.trim();
match lit(s) {
Lit::CStr(lit) => {
assert_eq!(*lit.value(), *value);
let again = lit.into_token_stream().to_string();
if again != s {
test_c_string(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_c_string(r#" c"" "#, c"");
test_c_string(r#" c"a" "#, c"a");
test_c_string(r#" c"\n" "#, c"\n");
test_c_string(r#" c"\r" "#, c"\r");
test_c_string(r#" c"\t" "#, c"\t");
test_c_string(r#" c"\\" "#, c"\\");
test_c_string(r#" c"\'" "#, c"'");
test_c_string(r#" c"\"" "#, c"\"");
test_c_string(
"c\"contains\nnewlines\\\nescaped newlines\"",
c"contains\nnewlinesescaped newlines",
);
test_c_string("cr\"raw\nstring\\\nhere\"", c"raw\nstring\\\nhere");
test_c_string("c\"...\"q", c"...");
test_c_string("cr\"...\"", c"...");
test_c_string("cr##\"...\"##", c"...");
test_c_string(
r#" c"hello\x80我叫\u{1F980}" "#, // from the RFC
c"hello\x80我叫\u{1F980}",
);
}
#[test]
fn bytes() {
#[track_caller]
fn test_byte(s: &str, value: u8) {
let s = s.trim();
match lit(s) {
Lit::Byte(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
assert_eq!(again, s);
}
wrong => panic!("{:?}", wrong),
}
}
test_byte(r#" b'a' "#, b'a');
test_byte(r#" b'\n' "#, b'\n');
test_byte(r#" b'\r' "#, b'\r');
test_byte(r#" b'\t' "#, b'\t');
test_byte(r#" b'\'' "#, b'\'');
test_byte(r#" b'"' "#, b'"');
test_byte(r#" b'a'q "#, b'a');
}
#[test]
fn chars() {
#[track_caller]
fn test_char(s: &str, value: char) {
let s = s.trim();
match lit(s) {
Lit::Char(lit) => {
assert_eq!(lit.value(), value);
let again = lit.into_token_stream().to_string();
if again != s {
test_char(&again, value);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_char(r#" 'a' "#, 'a');
test_char(r#" '\n' "#, '\n');
test_char(r#" '\r' "#, '\r');
test_char(r#" '\t' "#, '\t');
test_char(r#" '🐕' "#, '🐕'); // NOTE: This is an emoji
test_char(r#" '\'' "#, '\'');
test_char(r#" '"' "#, '"');
test_char(r#" '\u{1F415}' "#, '\u{1F415}');
test_char(r#" 'a'q "#, 'a');
}
#[test]
fn ints() {
#[track_caller]
fn test_int(s: &str, value: u64, suffix: &str) {
match lit(s) {
Lit::Int(lit) => {
assert_eq!(lit.base10_digits().parse::<u64>().unwrap(), value);
assert_eq!(lit.suffix(), suffix);
let again = lit.into_token_stream().to_string();
if again != s {
test_int(&again, value, suffix);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_int("5", 5, "");
test_int("5u32", 5, "u32");
test_int("0E", 0, "E");
test_int("0ECMA", 0, "ECMA");
test_int("0o0A", 0, "A");
test_int("5_0", 50, "");
test_int("5_____0_____", 50, "");
test_int("0x7f", 127, "");
test_int("0x7F", 127, "");
test_int("0b1001", 9, "");
test_int("0o73", 59, "");
test_int("0x7Fu8", 127, "u8");
test_int("0b1001i8", 9, "i8");
test_int("0o73u32", 59, "u32");
test_int("0x__7___f_", 127, "");
test_int("0x__7___F_", 127, "");
test_int("0b_1_0__01", 9, "");
test_int("0o_7__3", 59, "");
test_int("0x_7F__u8", 127, "u8");
test_int("0b__10__0_1i8", 9, "i8");
test_int("0o__7__________________3u32", 59, "u32");
test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
}
#[test]
fn floats() {
#[track_caller]
fn test_float(s: &str, value: f64, suffix: &str) {
match lit(s) {
Lit::Float(lit) => {
assert_eq!(lit.base10_digits().parse::<f64>().unwrap(), value);
assert_eq!(lit.suffix(), suffix);
let again = lit.into_token_stream().to_string();
if again != s {
test_float(&again, value, suffix);
}
}
wrong => panic!("{:?}", wrong),
}
}
test_float("5.5", 5.5, "");
test_float("5.5E12", 5.5e12, "");
test_float("5.5e12", 5.5e12, "");
test_float("1.0__3e-12", 1.03e-12, "");
test_float("1.03e+12", 1.03e12, "");
test_float("9e99e99", 9e99, "e99");
test_float("1e_0", 1.0, "");
test_float("0.0ECMA", 0.0, "ECMA");
}
#[test]
fn negative() {
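// Rust tokenizes `-1` as a minus followed by a literal, but LitInt::new and
// LitFloat::new accept a leading sign and preserve it when printed back.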
let span = Span::call_site();
assert_eq!("-1", LitInt::new("-1", span).to_string());
assert_eq!("-1i8", LitInt::new("-1i8", span).to_string());
assert_eq!("-1i16", LitInt::new("-1i16", span).to_string());
assert_eq!("-1i32", LitInt::new("-1i32", span).to_string());
assert_eq!("-1i64", LitInt::new("-1i64", span).to_string());
assert_eq!("-1.5", LitFloat::new("-1.5", span).to_string());
assert_eq!("-1.5f32", LitFloat::new("-1.5f32", span).to_string());
assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
}
#[test]
fn suffix() {
#[track_caller]
fn get_suffix(token: &str) -> String {
let lit = syn::parse_str::<Lit>(token).unwrap();
match lit {
Lit::Str(lit) => lit.suffix().to_owned(),
Lit::ByteStr(lit) => lit.suffix().to_owned(),
Lit::CStr(lit) => lit.suffix().to_owned(),
Lit::Byte(lit) => lit.suffix().to_owned(),
Lit::Char(lit) => lit.suffix().to_owned(),
Lit::Int(lit) => lit.suffix().to_owned(),
Lit::Float(lit) => lit.suffix().to_owned(),
_ => unimplemented!(),
}
}
assert_eq!(get_suffix("\"\"s"), "s");
assert_eq!(get_suffix("r\"\"r"), "r");
assert_eq!(get_suffix("r#\"\"#r"), "r");
assert_eq!(get_suffix("b\"\"b"), "b");
assert_eq!(get_suffix("br\"\"br"), "br");
assert_eq!(get_suffix("br#\"\"#br"), "br");
assert_eq!(get_suffix("c\"\"c"), "c");
assert_eq!(get_suffix("cr\"\"cr"), "cr");
assert_eq!(get_suffix("cr#\"\"#cr"), "cr");
assert_eq!(get_suffix("'c'c"), "c");
assert_eq!(get_suffix("b'b'b"), "b");
assert_eq!(get_suffix("1i32"), "i32");
assert_eq!(get_suffix("1_i32"), "i32");
assert_eq!(get_suffix("1.0f32"), "f32");
assert_eq!(get_suffix("1.0_f32"), "f32");
}
#[test]
fn test_deep_group_empty() {
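// A string literal wrapped in two layers of None-delimited groups must still
// parse as a plain Lit; the invisible delimiters are looked through.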
let tokens = TokenStream::from_iter([TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter([TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter([TokenTree::Literal(Literal::string("hi"))]),
))]),
))]);
snapshot!(tokens as Lit, @r#""hi""# );
}
#[test]
fn test_error() {
let err = syn::parse_str::<LitStr>("...").unwrap_err();
assert_eq!("expected string literal", err.to_string());
let err = syn::parse_str::<LitStr>("5").unwrap_err();
assert_eq!("expected string literal", err.to_string());
}

158
vendor/syn/tests/test_meta.rs vendored Normal file
View File

@@ -0,0 +1,158 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::shadow_unrelated,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use syn::{Meta, MetaList, MetaNameValue};
#[test]
fn test_parse_meta_item_word() {
let input = "hello";
snapshot!(input as Meta, @r#"
Meta::Path {
segments: [
PathSegment {
ident: "hello",
},
],
}
"#);
}
#[test]
fn test_parse_meta_name_value() {
let input = "foo = 5";
let (inner, meta) = (input, input);
snapshot!(inner as MetaNameValue, @r#"
MetaNameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: 5,
},
}
"#);
snapshot!(meta as Meta, @r#"
Meta::NameValue {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
value: Expr::Lit {
lit: 5,
},
}
"#);
assert_eq!(meta, Meta::NameValue(inner));
}
#[test]
fn test_parse_meta_item_list_lit() {
let input = "foo(5)";
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r#"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`5`),
}
"#);
snapshot!(meta as Meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`5`),
}
"#);
assert_eq!(meta, Meta::List(inner));
}
#[test]
fn test_parse_meta_item_multiple() {
let input = "foo(word, name = 5, list(name2 = 6), word2)";
let (inner, meta) = (input, input);
snapshot!(inner as MetaList, @r#"
MetaList {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
}
"#);
snapshot!(meta as Meta, @r#"
Meta::List {
path: Path {
segments: [
PathSegment {
ident: "foo",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`word , name = 5 , list (name2 = 6) , word2`),
}
"#);
assert_eq!(meta, Meta::List(inner));
}
#[test]
fn test_parse_path() {
let input = "::serde::Serialize";
snapshot!(input as Meta, @r#"
Meta::Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "serde",
},
Token![::],
PathSegment {
ident: "Serialize",
},
],
}
"#);
}

103
vendor/syn/tests/test_parse_buffer.rs vendored Normal file
View File

@@ -0,0 +1,103 @@
#![allow(clippy::non_ascii_literal)]
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, TokenStream, TokenTree};
use std::panic;
use syn::parse::discouraged::Speculative as _;
use syn::parse::{Parse, ParseStream, Parser, Result};
use syn::{parenthesized, Token};
#[test]
#[should_panic(expected = "fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_between_sources() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input1: ParseStream) -> Result<Self> {
let nested = |input2: ParseStream| {
input1.advance_to(input2);
Ok(Self)
};
nested.parse_str("")
}
}
syn::parse_str::<BreakRules>("").unwrap();
}
#[test]
#[should_panic(expected = "fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_between_brackets() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input: ParseStream) -> Result<Self> {
let a;
let b;
parenthesized!(a in input);
parenthesized!(b in input);
a.advance_to(&b);
Ok(Self)
}
}
syn::parse_str::<BreakRules>("()()").unwrap();
}
#[test]
#[should_panic(expected = "fork was not derived from the advancing parse stream")]
fn smuggled_speculative_cursor_into_brackets() {
struct BreakRules;
impl Parse for BreakRules {
fn parse(input: ParseStream) -> Result<Self> {
let a;
parenthesized!(a in input);
input.advance_to(&a);
Ok(Self)
}
}
syn::parse_str::<BreakRules>("()").unwrap();
}
#[test]
fn trailing_empty_none_group() {
fn parse(input: ParseStream) -> Result<()> {
input.parse::<Token![+]>()?;
let content;
parenthesized!(content in input);
content.parse::<Token![+]>()?;
Ok(())
}
// `+ ( + «∅ ∅» ) «∅ «∅ ∅» ∅»`
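// («∅ ∅» marks a None-delimited group, i.e. invisible delimiters; the trailing
// empty ones must be skipped both inside the parentheses and at end of input.)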
let tokens = TokenStream::from_iter([
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter([
TokenTree::Punct(Punct::new('+', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
]),
)),
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter([TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::new(),
))]),
)),
]);
parse.parse2(tokens).unwrap();
}
#[test]
fn test_unwind_safe() {
fn parse(input: ParseStream) -> Result<Ident> {
let thread_result = panic::catch_unwind(|| input.parse());
thread_result.unwrap()
}
parse.parse_str("throw").unwrap();
}

172
vendor/syn/tests/test_parse_quote.rs vendored Normal file
View File

@@ -0,0 +1,172 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use syn::punctuated::Punctuated;
use syn::{parse_quote, Attribute, Field, Lit, Pat, Stmt, Token};
#[test]
fn test_attribute() {
let attr: Attribute = parse_quote!(#[test]);
snapshot!(attr, @r#"
Attribute {
style: AttrStyle::Outer,
meta: Meta::Path {
segments: [
PathSegment {
ident: "test",
},
],
},
}
"#);
let attr: Attribute = parse_quote!(#![no_std]);
snapshot!(attr, @r#"
Attribute {
style: AttrStyle::Inner,
meta: Meta::Path {
segments: [
PathSegment {
ident: "no_std",
},
],
},
}
"#);
}
#[test]
fn test_field() {
let field: Field = parse_quote!(pub enabled: bool);
snapshot!(field, @r#"
Field {
vis: Visibility::Public,
ident: Some("enabled"),
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "bool",
},
],
},
},
}
"#);
let field: Field = parse_quote!(primitive::bool);
snapshot!(field, @r#"
Field {
vis: Visibility::Inherited,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "primitive",
},
Token![::],
PathSegment {
ident: "bool",
},
],
},
},
}
"#);
}
#[test]
fn test_pat() {
let pat: Pat = parse_quote!(Some(false) | None);
snapshot!(&pat, @r#"
Pat::Or {
cases: [
Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
},
],
},
elems: [
Pat::Lit(ExprLit {
lit: Lit::Bool {
value: false,
},
}),
],
},
Token![|],
Pat::Ident {
ident: "None",
},
],
}
"#);
let boxed_pat: Box<Pat> = parse_quote!(Some(false) | None);
assert_eq!(*boxed_pat, pat);
}
#[test]
fn test_punctuated() {
let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true);
snapshot!(punctuated, @r#"
[
Lit::Bool {
value: true,
},
Token![|],
Lit::Bool {
value: true,
},
]
"#);
let punctuated: Punctuated<Lit, Token![|]> = parse_quote!(true | true |);
snapshot!(punctuated, @r#"
[
Lit::Bool {
value: true,
},
Token![|],
Lit::Bool {
value: true,
},
Token![|],
]
"#);
}
#[test]
fn test_vec_stmt() {
let stmts: Vec<Stmt> = parse_quote! {
let _;
true
};
snapshot!(stmts, @r#"
[
Stmt::Local {
pat: Pat::Wild,
},
Stmt::Expr(
Expr::Lit {
lit: Lit::Bool {
value: true,
},
},
None,
),
]
"#);
}

187
vendor/syn/tests/test_parse_stream.rs vendored Normal file
View File

@@ -0,0 +1,187 @@
#![allow(clippy::items_after_statements, clippy::let_underscore_untyped)]
use proc_macro2::{Delimiter, Group, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use syn::ext::IdentExt as _;
use syn::parse::discouraged::AnyDelimiter;
use syn::parse::{ParseStream, Parser as _, Result};
use syn::{parenthesized, token, Ident, Lifetime, Token};
#[test]
fn test_peek_punct() {
let tokens = quote!(+= + =);
fn assert(input: ParseStream) -> Result<()> {
assert!(input.peek(Token![+]));
assert!(input.peek(Token![+=]));
let _: Token![+] = input.parse()?;
assert!(input.peek(Token![=]));
assert!(!input.peek(Token![==]));
assert!(!input.peek(Token![+]));
let _: Token![=] = input.parse()?;
assert!(input.peek(Token![+]));
assert!(!input.peek(Token![+=]));
let _: Token![+] = input.parse()?;
let _: Token![=] = input.parse()?;
Ok(())
}
assert.parse2(tokens).unwrap();
}
#[test]
fn test_peek_lifetime() {
// 'static ;
let tokens = TokenStream::from_iter([
TokenTree::Punct(Punct::new('\'', Spacing::Joint)),
TokenTree::Ident(Ident::new("static", Span::call_site())),
TokenTree::Punct(Punct::new(';', Spacing::Alone)),
]);
fn assert(input: ParseStream) -> Result<()> {
assert!(input.peek(Lifetime));
assert!(input.peek2(Token![;]));
assert!(!input.peek2(Token![static]));
let _: Lifetime = input.parse()?;
assert!(input.peek(Token![;]));
let _: Token![;] = input.parse()?;
Ok(())
}
assert.parse2(tokens).unwrap();
}
#[test]
fn test_peek_not_lifetime() {
// ' static
let tokens = TokenStream::from_iter([
TokenTree::Punct(Punct::new('\'', Spacing::Alone)),
TokenTree::Ident(Ident::new("static", Span::call_site())),
]);
fn assert(input: ParseStream) -> Result<()> {
assert!(!input.peek(Lifetime));
assert!(input.parse::<Option<Punct>>()?.is_none());
let _: TokenTree = input.parse()?;
assert!(input.peek(Token![static]));
let _: Token![static] = input.parse()?;
Ok(())
}
assert.parse2(tokens).unwrap();
}
#[test]
fn test_peek_ident() {
let tokens = quote!(static var);
fn assert(input: ParseStream) -> Result<()> {
assert!(!input.peek(Ident));
assert!(input.peek(Ident::peek_any));
assert!(input.peek(Token![static]));
let _: Token![static] = input.parse()?;
assert!(input.peek(Ident));
assert!(input.peek(Ident::peek_any));
let _: Ident = input.parse()?;
Ok(())
}
assert.parse2(tokens).unwrap();
}
#[test]
fn test_peek_groups() {
// pub ( :: ) «∅ ! = ∅» static
let tokens = TokenStream::from_iter([
TokenTree::Ident(Ident::new("pub", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter([
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
]),
)),
TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter([
TokenTree::Punct(Punct::new('!', Spacing::Alone)),
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
]),
)),
TokenTree::Ident(Ident::new("static", Span::call_site())),
]);
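// The parenthesized group is a real delimiter, while the None-delimited group is invisible: it can be peeked either as token::Group or transparently as the `!` and `=` inside it.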
fn assert(input: ParseStream) -> Result<()> {
assert!(input.peek2(token::Paren));
assert!(input.peek3(token::Group));
assert!(input.peek3(Token![!]));
let _: Token![pub] = input.parse()?;
assert!(input.peek(token::Paren));
assert!(!input.peek(Token![::]));
assert!(!input.peek2(Token![::]));
assert!(input.peek2(Token![!]));
assert!(input.peek2(token::Group));
assert!(input.peek3(Token![=]));
assert!(!input.peek3(Token![static]));
let content;
parenthesized!(content in input);
assert!(content.peek(Token![::]));
assert!(content.peek2(Token![:]));
assert!(!content.peek3(token::Group));
assert!(!content.peek3(Token![!]));
assert!(input.peek(token::Group));
assert!(input.peek(Token![!]));
let _: Token![::] = content.parse()?;
assert!(input.peek(token::Group));
assert!(input.peek(Token![!]));
assert!(input.peek2(Token![=]));
assert!(input.peek3(Token![static]));
assert!(!input.peek2(Token![static]));
let implicit = input.fork();
let explicit = input.fork();
let _: Token![!] = implicit.parse()?;
assert!(implicit.peek(Token![=]));
assert!(implicit.peek2(Token![static]));
let _: Token![=] = implicit.parse()?;
assert!(implicit.peek(Token![static]));
let (delimiter, _span, grouped) = explicit.parse_any_delimiter()?;
assert_eq!(delimiter, Delimiter::None);
assert!(grouped.peek(Token![!]));
assert!(grouped.peek2(Token![=]));
assert!(!grouped.peek3(Token![static]));
let _: Token![!] = grouped.parse()?;
assert!(grouped.peek(Token![=]));
assert!(!grouped.peek2(Token![static]));
let _: Token![=] = grouped.parse()?;
assert!(!grouped.peek(Token![static]));
let _: TokenStream = input.parse()?;
Ok(())
}
assert.parse2(tokens).unwrap();
}

158
vendor/syn/tests/test_pat.rs vendored Normal file

@@ -0,0 +1,158 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use quote::{quote, ToTokens as _};
use syn::parse::Parser;
use syn::punctuated::Punctuated;
use syn::{parse_quote, token, Item, Pat, PatTuple, Stmt, Token};
#[test]
fn test_pat_ident() {
match Pat::parse_single.parse2(quote!(self)).unwrap() {
Pat::Ident(_) => (),
value => panic!("expected PatIdent, got {:?}", value),
}
}
#[test]
fn test_pat_path() {
match Pat::parse_single.parse2(quote!(self::CONST)).unwrap() {
Pat::Path(_) => (),
value => panic!("expected PatPath, got {:?}", value),
}
}
#[test]
fn test_leading_vert() {
// https://github.com/rust-lang/rust/blob/1.43.0/src/test/ui/or-patterns/remove-leading-vert.rs
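// Leading `|` is rejected at the top level of `let` bindings and fn parameters, but accepted inside parenthesized, slice, tuple struct, and struct field subpatterns; a doubled `||` is always an error.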
syn::parse_str::<Item>("fn f() {}").unwrap();
syn::parse_str::<Item>("fn fun1(| A: E) {}").unwrap_err();
syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();
syn::parse_str::<Stmt>("let | () = ();").unwrap_err();
syn::parse_str::<Stmt>("let (| A): E;").unwrap();
syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap();
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap();
syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap();
syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap();
syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
}
#[test]
fn test_group() {
let group = Group::new(Delimiter::None, quote!(Some(_)));
let tokens = TokenStream::from_iter([TokenTree::Group(group)]);
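// A None-delimited group wrapping `Some(_)` parses transparently: the result is the inner tuple struct pattern, not a group node.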
let pat = Pat::parse_single.parse2(tokens).unwrap();
snapshot!(pat, @r#"
Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
},
],
},
elems: [
Pat::Wild,
],
}
"#);
}
#[test]
fn test_ranges() {
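// `..=` requires an upper bound, `...` requires both bounds, and one-sided ranges directly inside slice patterns must be parenthesized; fully bounded ranges are accepted everywhere.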
Pat::parse_single.parse_str("..").unwrap();
Pat::parse_single.parse_str("..hi").unwrap();
Pat::parse_single.parse_str("lo..").unwrap();
Pat::parse_single.parse_str("lo..hi").unwrap();
Pat::parse_single.parse_str("..=").unwrap_err();
Pat::parse_single.parse_str("..=hi").unwrap();
Pat::parse_single.parse_str("lo..=").unwrap_err();
Pat::parse_single.parse_str("lo..=hi").unwrap();
Pat::parse_single.parse_str("...").unwrap_err();
Pat::parse_single.parse_str("...hi").unwrap_err();
Pat::parse_single.parse_str("lo...").unwrap_err();
Pat::parse_single.parse_str("lo...hi").unwrap();
Pat::parse_single.parse_str("[lo..]").unwrap_err();
Pat::parse_single.parse_str("[..=hi]").unwrap_err();
Pat::parse_single.parse_str("[(lo..)]").unwrap();
Pat::parse_single.parse_str("[(..=hi)]").unwrap();
Pat::parse_single.parse_str("[lo..=hi]").unwrap();
Pat::parse_single.parse_str("[_, lo.., _]").unwrap_err();
Pat::parse_single.parse_str("[_, ..=hi, _]").unwrap_err();
Pat::parse_single.parse_str("[_, (lo..), _]").unwrap();
Pat::parse_single.parse_str("[_, (..=hi), _]").unwrap();
Pat::parse_single.parse_str("[_, lo..=hi, _]").unwrap();
}
#[test]
fn test_tuple_comma() {
let mut expr = PatTuple {
attrs: Vec::new(),
paren_token: token::Paren::default(),
elems: Punctuated::new(),
};
snapshot!(expr.to_token_stream() as Pat, @"Pat::Tuple");
expr.elems.push_value(parse_quote!(_));
// Must not parse to Pat::Paren
snapshot!(expr.to_token_stream() as Pat, @r#"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
],
}
"#);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Pat, @r#"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
],
}
"#);
expr.elems.push_value(parse_quote!(_));
snapshot!(expr.to_token_stream() as Pat, @r#"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
Pat::Wild,
],
}
"#);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Pat, @r#"
Pat::Tuple {
elems: [
Pat::Wild,
Token![,],
Pat::Wild,
Token![,],
],
}
"#);
}

116
vendor/syn/tests/test_path.rs vendored Normal file

@@ -0,0 +1,116 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens};
use syn::{parse_quote, Expr, Type, TypePath};
#[test]
fn parse_interpolated_leading_component() {
// mimics the token stream corresponding to `$mod::rest`
let tokens = TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, quote! { first })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("rest", Span::call_site())),
]);
snapshot!(tokens.clone() as Expr, @r#"
Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
},
Token![::],
PathSegment {
ident: "rest",
},
],
},
}
"#);
snapshot!(tokens as Type, @r#"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "first",
},
Token![::],
PathSegment {
ident: "rest",
},
],
},
}
"#);
}
#[test]
fn print_incomplete_qpath() {
// qpath with `as` token
let mut ty: TypePath = parse_quote!(<Self as A>::Q);
snapshot!(ty.to_token_stream(), @"TokenStream(`< Self as A > :: Q`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(`< Self as A > ::`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(`< Self >`)");
assert!(ty.path.segments.pop().is_none());
// qpath without `as` token
let mut ty: TypePath = parse_quote!(<Self>::A::B);
snapshot!(ty.to_token_stream(), @"TokenStream(`< Self > :: A :: B`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(`< Self > :: A ::`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(`< Self > ::`)");
assert!(ty.path.segments.pop().is_none());
// normal path
let mut ty: TypePath = parse_quote!(Self::A::B);
snapshot!(ty.to_token_stream(), @"TokenStream(`Self :: A :: B`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(`Self :: A ::`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(`Self ::`)");
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @"TokenStream(``)");
assert!(ty.path.segments.pop().is_none());
}
#[test]
fn parse_parenthesized_path_arguments_with_disambiguator() {
#[rustfmt::skip]
let tokens = quote!(dyn FnOnce::() -> !);
snapshot!(tokens as Type, @r#"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "FnOnce",
arguments: PathArguments::Parenthesized {
output: ReturnType::Type(
Type::Never,
),
},
},
],
},
}),
],
}
"#);
}

549
vendor/syn/tests/test_precedence.rs vendored Normal file

@@ -0,0 +1,549 @@
// This test does the following for every file in the rust-lang/rust repo:
//
// 1. Parse the file using syn into a syn::File.
// 2. Extract every syn::Expr from the file.
// 3. Print each expr to a string of source code.
// 4. Parse the source code using librustc_parse into a rustc_ast::Expr.
// 5. For both the syn::Expr and rustc_ast::Expr, crawl the syntax tree to
// insert parentheses surrounding every subexpression.
// 6. Serialize the fully parenthesized syn::Expr to a string of source code.
// 7. Parse the fully parenthesized source code using librustc_parse.
// 8. Compare the rustc_ast::Expr resulting from parenthesizing using rustc data
// structures vs syn data structures, ignoring spans. If they agree, rustc's
// parser and syn's parser have identical handling of expression precedence.
#![cfg(not(syn_disable_nightly_tests))]
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(
clippy::blocks_in_conditions,
clippy::doc_markdown,
clippy::elidable_lifetime_names,
clippy::explicit_deref_methods,
clippy::let_underscore_untyped,
clippy::manual_assert,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::match_wildcard_for_single_variants,
clippy::needless_lifetimes,
clippy::too_many_lines,
clippy::uninlined_format_args,
clippy::unnecessary_box_returns
)]
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_span;
extern crate smallvec;
extern crate thin_vec;
use crate::common::eq::SpanlessEq;
use crate::common::parse;
use quote::ToTokens;
use rustc_ast::ast;
use rustc_ast_pretty::pprust;
use rustc_span::edition::Edition;
use std::fs;
use std::mem;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use syn::parse::Parser as _;
#[macro_use]
mod macros;
mod common;
mod repo;
#[path = "../src/scan_expr.rs"]
mod scan_expr;
#[test]
fn test_rustc_precedence() {
repo::rayon_init();
repo::clone_rust();
let abort_after = repo::abort_after();
if abort_after == 0 {
panic!("skipping all precedence tests");
}
let passed = AtomicUsize::new(0);
let failed = AtomicUsize::new(0);
repo::for_each_rust_file(|path| {
let content = fs::read_to_string(path).unwrap();
let (l_passed, l_failed) = match syn::parse_file(&content) {
Ok(file) => {
let edition = repo::edition(path).parse().unwrap();
let exprs = collect_exprs(file);
let (l_passed, l_failed) = test_expressions(path, edition, exprs);
errorf!(
"=== {}: {} passed | {} failed\n",
path.display(),
l_passed,
l_failed,
);
(l_passed, l_failed)
}
Err(msg) => {
errorf!("\nFAIL {} - syn failed to parse: {}\n", path.display(), msg);
(0, 1)
}
};
passed.fetch_add(l_passed, Ordering::Relaxed);
let prev_failed = failed.fetch_add(l_failed, Ordering::Relaxed);
if prev_failed + l_failed >= abort_after {
process::exit(1);
}
});
let passed = passed.into_inner();
let failed = failed.into_inner();
errorf!("\n===== Precedence Test Results =====\n");
errorf!("{} passed | {} failed\n", passed, failed);
if failed > 0 {
panic!("{} failures", failed);
}
}
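/// Parenthesize each expression with both rustc and syn and compare the resulting
/// ASTs, returning (passed, failed) counts for this file.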
fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (usize, usize) {
let mut passed = 0;
let mut failed = 0;
rustc_span::create_session_if_not_set_then(edition, |_| {
for expr in exprs {
let expr_tokens = expr.to_token_stream();
let source_code = expr_tokens.to_string();
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
e
} else {
failed += 1;
errorf!(
"\nFAIL {} - librustc failed to parse original\n",
path.display(),
);
continue;
};
let syn_parenthesized_code =
syn_parenthesize(expr.clone()).to_token_stream().to_string();
let syn_ast = if let Some(e) = parse::librustc_expr(&syn_parenthesized_code) {
e
} else {
failed += 1;
errorf!(
"\nFAIL {} - librustc failed to parse parenthesized\n",
path.display(),
);
continue;
};
if !SpanlessEq::eq(&syn_ast, &librustc_ast) {
failed += 1;
let syn_pretty = pprust::expr_to_string(&syn_ast);
let librustc_pretty = pprust::expr_to_string(&librustc_ast);
errorf!(
"\nFAIL {}\n{}\nsyn != rustc\n{}\n",
path.display(),
syn_pretty,
librustc_pretty,
);
continue;
}
let expr_invisible = make_parens_invisible(expr);
let Ok(reparsed_expr_invisible) = syn::parse2(expr_invisible.to_token_stream()) else {
failed += 1;
errorf!(
"\nFAIL {} - syn failed to parse invisible delimiters\n{}\n",
path.display(),
source_code,
);
continue;
};
if expr_invisible != reparsed_expr_invisible {
failed += 1;
errorf!(
"\nFAIL {} - mismatch after parsing invisible delimiters\n{}\n",
path.display(),
source_code,
);
continue;
}
if scan_expr::scan_expr.parse2(expr_tokens).is_err() {
failed += 1;
errorf!(
"\nFAIL {} - failed to scan expr\n{}\n",
path.display(),
source_code,
);
continue;
}
passed += 1;
}
});
(passed, failed)
}
fn librustc_parse_and_rewrite(input: &str) -> Option<Box<ast::Expr>> {
parse::librustc_expr(input).map(librustc_parenthesize)
}
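/// Wrap every rustc subexpression in ExprKind::Paren, skipping constructs where
/// extra parentheses would be invalid or change meaning (blocks, if/let, let
/// chains, unbraced const generic arguments, patterns, types, attributes).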
fn librustc_parenthesize(mut librustc_expr: Box<ast::Expr>) -> Box<ast::Expr> {
use rustc_ast::ast::{
AssocItem, AssocItemKind, Attribute, BinOpKind, Block, BoundConstness, Expr, ExprField,
ExprKind, GenericArg, GenericBound, Local, LocalKind, Pat, PolyTraitRef, Stmt, StmtKind,
StructExpr, StructRest, TraitBoundModifiers, Ty,
};
use rustc_ast::mut_visit::{walk_flat_map_assoc_item, MutVisitor};
use rustc_ast::visit::{AssocCtxt, BoundKind};
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use std::ops::DerefMut;
use thin_vec::ThinVec;
struct FullyParenthesize;
fn contains_let_chain(expr: &Expr) -> bool {
match &expr.kind {
ExprKind::Let(..) => true,
ExprKind::Binary(binop, left, right) => {
binop.node == BinOpKind::And
&& (contains_let_chain(left) || contains_let_chain(right))
}
_ => false,
}
}
fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, vis);
} else {
vis.visit_expr(&mut f.expr);
}
vec![f]
}
fn flat_map_stmt<T: MutVisitor>(stmt: Stmt, vis: &mut T) -> Vec<Stmt> {
let kind = match stmt.kind {
// Don't wrap toplevel expressions in statements.
StmtKind::Expr(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Expr(e)
}
StmtKind::Semi(mut e) => {
noop_visit_expr(&mut e, vis);
StmtKind::Semi(e)
}
s => s,
};
vec![Stmt { kind, ..stmt }]
}
fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
match &mut e.kind {
ExprKind::Become(..) => {}
ExprKind::Struct(expr) => {
let StructExpr {
qself,
path,
fields,
rest,
} = expr.deref_mut();
if let Some(qself) = qself {
vis.visit_qself(qself);
}
vis.visit_path(path);
fields.flat_map_in_place(|field| flat_map_field(field, vis));
if let StructRest::Base(rest) = rest {
vis.visit_expr(rest);
}
}
_ => rustc_ast::mut_visit::walk_expr(vis, e),
}
}
impl MutVisitor for FullyParenthesize {
fn visit_expr(&mut self, e: &mut Expr) {
noop_visit_expr(e, self);
match e.kind {
ExprKind::Block(..) | ExprKind::If(..) | ExprKind::Let(..) => {}
ExprKind::Binary(..) if contains_let_chain(e) => {}
_ => {
let inner = mem::replace(e, Expr::dummy());
*e = Expr {
id: ast::DUMMY_NODE_ID,
kind: ExprKind::Paren(Box::new(inner)),
span: DUMMY_SP,
attrs: ThinVec::new(),
tokens: None,
};
}
}
}
fn visit_generic_arg(&mut self, arg: &mut GenericArg) {
match arg {
GenericArg::Lifetime(_lifetime) => {}
GenericArg::Type(arg) => self.visit_ty(arg),
// Don't wrap unbraced const generic arg as that's invalid syntax.
GenericArg::Const(anon_const) => {
if let ExprKind::Block(..) = &mut anon_const.value.kind {
noop_visit_expr(&mut anon_const.value, self);
}
}
}
}
fn visit_param_bound(&mut self, bound: &mut GenericBound, _ctxt: BoundKind) {
match bound {
GenericBound::Trait(PolyTraitRef {
modifiers:
TraitBoundModifiers {
constness: BoundConstness::Maybe(_),
..
},
..
})
| GenericBound::Outlives(..)
| GenericBound::Use(..) => {}
GenericBound::Trait(ty) => self.visit_poly_trait_ref(ty),
}
}
fn visit_block(&mut self, block: &mut Block) {
self.visit_id(&mut block.id);
block
.stmts
.flat_map_in_place(|stmt| flat_map_stmt(stmt, self));
self.visit_span(&mut block.span);
}
fn visit_local(&mut self, local: &mut Local) {
match &mut local.kind {
LocalKind::Decl => {}
LocalKind::Init(init) => {
self.visit_expr(init);
}
LocalKind::InitElse(init, els) => {
self.visit_expr(init);
self.visit_block(els);
}
}
}
fn flat_map_assoc_item(
&mut self,
item: Box<AssocItem>,
ctxt: AssocCtxt,
) -> SmallVec<[Box<AssocItem>; 1]> {
match &item.kind {
AssocItemKind::Const(const_item)
if !const_item.generics.params.is_empty()
|| !const_item.generics.where_clause.predicates.is_empty() =>
{
SmallVec::from([item])
}
_ => walk_flat_map_assoc_item(self, item, ctxt),
}
}
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now
// focus on expressions appearing in other places.
fn visit_pat(&mut self, pat: &mut Pat) {
let _ = pat;
}
fn visit_ty(&mut self, ty: &mut Ty) {
let _ = ty;
}
fn visit_attribute(&mut self, attr: &mut Attribute) {
let _ = attr;
}
}
let mut folder = FullyParenthesize;
folder.visit_expr(&mut librustc_expr);
librustc_expr
}
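/// The syn-side counterpart of librustc_parenthesize: wrap every subexpression
/// in Expr::Paren, using the same skip rules so both trees stay comparable.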
fn syn_parenthesize(syn_expr: syn::Expr) -> syn::Expr {
use syn::fold::{fold_expr, fold_generic_argument, Fold};
use syn::{token, BinOp, Expr, ExprParen, GenericArgument, MetaNameValue, Pat, Stmt, Type};
struct FullyParenthesize;
fn parenthesize(expr: Expr) -> Expr {
Expr::Paren(ExprParen {
attrs: Vec::new(),
expr: Box::new(expr),
paren_token: token::Paren::default(),
})
}
fn needs_paren(expr: &Expr) -> bool {
match expr {
Expr::Group(_) => unreachable!(),
Expr::If(_) | Expr::Unsafe(_) | Expr::Block(_) | Expr::Let(_) => false,
Expr::Binary(_) => !contains_let_chain(expr),
_ => true,
}
}
fn contains_let_chain(expr: &Expr) -> bool {
match expr {
Expr::Let(_) => true,
Expr::Binary(expr) => {
matches!(expr.op, BinOp::And(_))
&& (contains_let_chain(&expr.left) || contains_let_chain(&expr.right))
}
_ => false,
}
}
impl Fold for FullyParenthesize {
fn fold_expr(&mut self, expr: Expr) -> Expr {
let needs_paren = needs_paren(&expr);
let folded = fold_expr(self, expr);
if needs_paren {
parenthesize(folded)
} else {
folded
}
}
fn fold_generic_argument(&mut self, arg: GenericArgument) -> GenericArgument {
match arg {
GenericArgument::Const(arg) => GenericArgument::Const(match arg {
Expr::Block(_) => fold_expr(self, arg),
// Don't wrap unbraced const generic arg as that's invalid syntax.
_ => arg,
}),
_ => fold_generic_argument(self, arg),
}
}
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
match stmt {
// Don't wrap toplevel expressions in statements.
Stmt::Expr(Expr::Verbatim(_), Some(_)) => stmt,
Stmt::Expr(e, semi) => Stmt::Expr(fold_expr(self, e), semi),
s => s,
}
}
fn fold_meta_name_value(&mut self, meta: MetaNameValue) -> MetaNameValue {
// Don't turn #[p = "..."] into #[p = ("...")].
meta
}
// We don't want to look at expressions that might appear in patterns or
// types yet. We'll look into comparing those in the future. For now
// focus on expressions appearing in other places.
fn fold_pat(&mut self, pat: Pat) -> Pat {
pat
}
fn fold_type(&mut self, ty: Type) -> Type {
ty
}
}
let mut folder = FullyParenthesize;
folder.fold_expr(syn_expr)
}
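/// Replace explicit parentheses with invisible delimiters (None-delimited
/// groups) so that reparsing the printed tokens exercises syn's handling of
/// Expr::Group precedence.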
fn make_parens_invisible(expr: syn::Expr) -> syn::Expr {
use syn::fold::{fold_expr, fold_stmt, Fold};
use syn::{token, Expr, ExprGroup, ExprParen, Stmt};
struct MakeParensInvisible;
impl Fold for MakeParensInvisible {
fn fold_expr(&mut self, mut expr: Expr) -> Expr {
if let Expr::Paren(paren) = expr {
expr = Expr::Group(ExprGroup {
attrs: paren.attrs,
group_token: token::Group(paren.paren_token.span.join()),
expr: paren.expr,
});
}
fold_expr(self, expr)
}
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
if let Stmt::Expr(expr @ (Expr::Binary(_) | Expr::Call(_) | Expr::Cast(_)), None) = stmt
{
Stmt::Expr(
Expr::Paren(ExprParen {
attrs: Vec::new(),
paren_token: token::Paren::default(),
expr: Box::new(fold_expr(self, expr)),
}),
None,
)
} else {
fold_stmt(self, stmt)
}
}
}
let mut folder = MakeParensInvisible;
folder.fold_expr(expr)
}
/// Walk through a crate collecting all expressions we can find in it.
fn collect_exprs(file: syn::File) -> Vec<syn::Expr> {
use syn::fold::Fold;
use syn::punctuated::Punctuated;
use syn::{token, ConstParam, Expr, ExprTuple, Pat, Path};
struct CollectExprs(Vec<Expr>);
impl Fold for CollectExprs {
fn fold_expr(&mut self, expr: Expr) -> Expr {
match expr {
Expr::Verbatim(_) => {}
_ => self.0.push(expr),
}
Expr::Tuple(ExprTuple {
attrs: vec![],
elems: Punctuated::new(),
paren_token: token::Paren::default(),
})
}
fn fold_pat(&mut self, pat: Pat) -> Pat {
pat
}
fn fold_path(&mut self, path: Path) -> Path {
// Skip traversing into const generic path arguments
path
}
fn fold_const_param(&mut self, const_param: ConstParam) -> ConstParam {
const_param
}
}
let mut folder = CollectExprs(vec![]);
folder.fold_file(file);
folder.0
}

327
vendor/syn/tests/test_receiver.rs vendored Normal file

@@ -0,0 +1,327 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use syn::{parse_quote, TraitItemFn};
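// These tests check how syn represents the method receiver in trait method
// signatures, covering both the shorthand forms and the explicit `self: Type` forms.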
#[test]
fn test_by_value() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_value(self: Self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"#);
}
#[test]
fn test_by_mut_value() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_mut(mut self: Self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
mutability: Some,
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"#);
}
#[test]
fn test_by_ref() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_ref(self: &Self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Reference {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"#);
}
#[test]
fn test_by_box() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_box(self: Box<Self>);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Box",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
}),
],
},
},
],
},
},
})
"#);
}
#[test]
fn test_by_pin() {
let TraitItemFn { sig, .. } = parse_quote! {
fn by_pin(self: Pin<Self>);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Pin",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
}),
],
},
},
],
},
},
})
"#);
}
#[test]
fn test_explicit_type() {
let TraitItemFn { sig, .. } = parse_quote! {
fn explicit_type(self: Pin<MyType>);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
colon_token: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Pin",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "MyType",
},
],
},
}),
],
},
},
],
},
},
})
"#);
}
#[test]
fn test_value_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn value_shorthand(self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"#);
}
#[test]
fn test_mut_value_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn mut_value_shorthand(mut self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
mutability: Some,
ty: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
})
"#);
}
#[test]
fn test_ref_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_shorthand(&self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
reference: Some(None),
ty: Type::Reference {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"#);
}
#[test]
fn test_ref_shorthand_with_lifetime() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_shorthand(&'a self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
reference: Some(Some(Lifetime {
ident: "a",
})),
ty: Type::Reference {
lifetime: Some(Lifetime {
ident: "a",
}),
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"#);
}
#[test]
fn test_ref_mut_shorthand() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_mut_shorthand(&mut self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
reference: Some(None),
mutability: Some,
ty: Type::Reference {
mutability: Some,
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"#);
}
#[test]
fn test_ref_mut_shorthand_with_lifetime() {
let TraitItemFn { sig, .. } = parse_quote! {
fn ref_mut_shorthand(&'a mut self);
};
snapshot!(&sig.inputs[0], @r#"
FnArg::Receiver(Receiver {
reference: Some(Some(Lifetime {
ident: "a",
})),
mutability: Some,
ty: Type::Reference {
lifetime: Some(Lifetime {
ident: "a",
}),
mutability: Some,
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Self",
},
],
},
},
},
})
"#);
}

249
vendor/syn/tests/test_round_trip.rs vendored Normal file

@@ -0,0 +1,249 @@
#![cfg(not(syn_disable_nightly_tests))]
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(
clippy::blocks_in_conditions,
clippy::elidable_lifetime_names,
clippy::manual_assert,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#![allow(mismatched_lifetime_syntaxes)]
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_error_messages;
extern crate rustc_errors;
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_session;
extern crate rustc_span;
use crate::common::eq::SpanlessEq;
use quote::quote;
use rustc_ast::ast::{
AngleBracketedArg, Crate, GenericArg, GenericArgs, GenericParamKind, Generics,
};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast_pretty::pprust;
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_error_messages::{DiagMessage, LazyFallbackBundle};
use rustc_errors::{translation, Diag, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;
use std::borrow::Cow;
use std::fs;
use std::panic;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::Instant;
#[macro_use]
mod macros;
mod common;
mod repo;
#[test]
fn test_round_trip() {
repo::rayon_init();
repo::clone_rust();
let abort_after = repo::abort_after();
if abort_after == 0 {
panic!("skipping all round_trip tests");
}
let failed = AtomicUsize::new(0);
repo::for_each_rust_file(|path| test(path, &failed, abort_after));
let failed = failed.into_inner();
if failed > 0 {
panic!("{} failures", failed);
}
}
fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) {
let failed = || {
let prev_failed = failed.fetch_add(1, Ordering::Relaxed);
if prev_failed + 1 >= abort_after {
process::exit(1);
}
};
let content = fs::read_to_string(path).unwrap();
let (back, elapsed) = match panic::catch_unwind(|| {
let start = Instant::now();
let result = syn::parse_file(&content);
let elapsed = start.elapsed();
result.map(|krate| (quote!(#krate).to_string(), elapsed))
}) {
Err(_) => {
errorf!("=== {}: syn panic\n", path.display());
failed();
return;
}
Ok(Err(msg)) => {
errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
failed();
return;
}
Ok(Ok(result)) => result,
};
let edition = repo::edition(path).parse().unwrap();
rustc_span::create_session_if_not_set_then(edition, |_| {
let equal = match panic::catch_unwind(|| {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
let sess = ParseSess::new(locale_resources);
let before = match librustc_parse(content, &sess) {
Ok(before) => before,
Err(diagnostic) => {
errorf!(
"=== {}: ignore - librustc failed to parse original content: {}\n",
path.display(),
translate_message(&diagnostic),
);
diagnostic.cancel();
return Err(true);
}
};
let after = match librustc_parse(back, &sess) {
Ok(after) => after,
Err(diagnostic) => {
errorf!("=== {}: librustc failed to parse", path.display());
diagnostic.emit();
return Err(false);
}
};
Ok((before, after))
}) {
Err(_) => {
errorf!("=== {}: ignoring librustc panic\n", path.display());
true
}
Ok(Err(equal)) => equal,
Ok(Ok((mut before, mut after))) => {
normalize(&mut before);
normalize(&mut after);
if SpanlessEq::eq(&before, &after) {
errorf!(
"=== {}: pass in {}ms\n",
path.display(),
elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000
);
true
} else {
errorf!(
"=== {}: FAIL\n{}\n!=\n{}\n",
path.display(),
pprust::crate_to_string_for_macros(&before),
pprust::crate_to_string_for_macros(&after),
);
false
}
}
};
if !equal {
failed();
}
});
}
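/// Parse the content as a whole crate with rustc's parser, giving each
/// invocation a unique synthetic file name.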
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
static COUNTER: AtomicUsize = AtomicUsize::new(0);
let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
let name = FileName::Custom(format!("test_round_trip{}", counter));
let mut parser = parse::new_parser_from_source_str(sess, name, content).unwrap();
parser.parse_crate_mod()
}
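/// Render a rustc diagnostic to a plain string, resolving Fluent identifiers
/// through the fallback bundle.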
fn translate_message(diagnostic: &Diag) -> Cow<'static, str> {
thread_local! {
static FLUENT_BUNDLE: LazyFallbackBundle = {
let locale_resources = rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec();
let with_directionality_markers = false;
rustc_error_messages::fallback_fluent_bundle(locale_resources, with_directionality_markers)
};
}
let message = &diagnostic.messages[0].0;
let args = translation::to_fluent_args(diagnostic.args.iter());
let (identifier, attr) = match message {
DiagMessage::Str(msg) | DiagMessage::Translated(msg) => return msg.clone(),
DiagMessage::FluentIdentifier(identifier, attr) => (identifier, attr),
};
FLUENT_BUNDLE.with(|fluent_bundle| {
let message = fluent_bundle
.get_message(identifier)
.expect("missing diagnostic in fluent bundle");
let value = match attr {
Some(attr) => message
.get_attribute(attr)
.expect("missing attribute in fluent message")
.value(),
None => message.value().expect("missing value in fluent message"),
};
let mut err = Vec::new();
let translated = fluent_bundle.format_pattern(value, Some(&args), &mut err);
assert!(err.is_empty());
Cow::Owned(translated.into_owned())
})
}
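/// Sort generic parameters and arguments into a canonical order and drop empty
/// where-clauses, so that harmless reorderings by syn's printer do not count as
/// round-trip mismatches.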
fn normalize(krate: &mut Crate) {
struct NormalizeVisitor;
impl MutVisitor for NormalizeVisitor {
fn visit_generic_args(&mut self, e: &mut GenericArgs) {
if let GenericArgs::AngleBracketed(e) = e {
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
Lifetimes,
TypesAndConsts,
Constraints,
}
e.args.sort_by_key(|arg| match arg {
AngleBracketedArg::Arg(arg) => match arg {
GenericArg::Lifetime(_) => Group::Lifetimes,
GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts,
},
AngleBracketedArg::Constraint(_) => Group::Constraints,
});
}
mut_visit::walk_generic_args(self, e);
}
fn visit_generics(&mut self, e: &mut Generics) {
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum Group {
Lifetimes,
TypesAndConsts,
}
e.params.sort_by_key(|param| match param.kind {
GenericParamKind::Lifetime => Group::Lifetimes,
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
Group::TypesAndConsts
}
});
e.params
.flat_map_in_place(|param| self.flat_map_generic_param(param));
if e.where_clause.predicates.is_empty() {
e.where_clause.has_where_token = false;
}
}
}
NormalizeVisitor.visit_crate(krate);
}

73
vendor/syn/tests/test_shebang.rs vendored Normal file

@@ -0,0 +1,73 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
#[test]
fn test_basic() {
let content = "#!/usr/bin/env rustx\nfn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r##"
File {
shebang: Some("#!/usr/bin/env rustx"),
items: [
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
},
],
}
"##);
}
#[test]
fn test_comment() {
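// Not a shebang: once the line comment is skipped, `#!` is followed by `[`, so
// this parses as the inner attribute `#![allow(dead_code)]`.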
let content = "#!//am/i/a/comment\n[allow(dead_code)] fn main() {}";
let file = syn::parse_file(content).unwrap();
snapshot!(file, @r#"
File {
attrs: [
Attribute {
style: AttrStyle::Inner,
meta: Meta::List {
path: Path {
segments: [
PathSegment {
ident: "allow",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`dead_code`),
},
},
],
items: [
Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
},
],
}
"#);
}

54
vendor/syn/tests/test_size.rs vendored Normal file

@@ -0,0 +1,54 @@
// Assumes proc-macro2's "span-locations" feature is off.
use std::mem;
use syn::{Expr, Item, Lit, Pat, Type};
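// These assertions pin the size of syn's principal AST enums so accidental
// growth is noticed; the rustversion/cfg_attr gates skip them on non-64-bit
// targets and older compilers.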
#[rustversion::attr(before(2022-11-24), ignore = "requires nightly-2022-11-24 or newer")]
#[rustversion::attr(
since(2022-11-24),
cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
)]
#[test]
fn test_expr_size() {
assert_eq!(mem::size_of::<Expr>(), 176);
}
#[rustversion::attr(before(2022-09-09), ignore = "requires nightly-2022-09-09 or newer")]
#[rustversion::attr(
since(2022-09-09),
cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
)]
#[test]
fn test_item_size() {
assert_eq!(mem::size_of::<Item>(), 352);
}
#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")]
#[rustversion::attr(
since(2023-04-29),
cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
)]
#[test]
fn test_type_size() {
assert_eq!(mem::size_of::<Type>(), 224);
}
#[rustversion::attr(before(2023-04-29), ignore = "requires nightly-2023-04-29 or newer")]
#[rustversion::attr(
since(2023-04-29),
cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
)]
#[test]
fn test_pat_size() {
assert_eq!(mem::size_of::<Pat>(), 184);
}
#[rustversion::attr(before(2023-12-20), ignore = "requires nightly-2023-12-20 or newer")]
#[rustversion::attr(
since(2023-12-20),
cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")
)]
#[test]
fn test_lit_size() {
assert_eq!(mem::size_of::<Lit>(), 24);
}

337
vendor/syn/tests/test_stmt.rs vendored Normal file

@@ -0,0 +1,337 @@
#![allow(
clippy::assertions_on_result_states,
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::non_ascii_literal,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens as _};
use syn::parse::Parser as _;
use syn::{Block, Stmt};
#[test]
fn test_raw_operator() {
let stmt = syn::parse_str::<Stmt>("let _ = &raw const x;").unwrap();
snapshot!(stmt, @r#"
Stmt::Local {
pat: Pat::Wild,
init: Some(LocalInit {
expr: Expr::RawAddr {
mutability: PointerMutability::Const,
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "x",
},
],
},
},
},
}),
}
"#);
}
#[test]
fn test_raw_variable() {
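// Without a following `const` or `mut`, `&raw` is an ordinary reference to a
// variable named `raw`, not a raw-address-of expression.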
let stmt = syn::parse_str::<Stmt>("let _ = &raw;").unwrap();
snapshot!(stmt, @r#"
Stmt::Local {
pat: Pat::Wild,
init: Some(LocalInit {
expr: Expr::Reference {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "raw",
},
],
},
},
},
}),
}
"#);
}
#[test]
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}
#[test]
fn test_none_group() {
// «∅ async fn f() {} ∅»
let tokens = TokenStream::from_iter([TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter([
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);
snapshot!(tokens as Stmt, @r#"
Stmt::Item(Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [],
},
})
"#);
let tokens = Group::new(Delimiter::None, quote!(let None = None)).to_token_stream();
let stmts = Block::parse_within.parse2(tokens).unwrap();
snapshot!(stmts, @r#"
[
Stmt::Expr(
Expr::Group {
expr: Expr::Let {
pat: Pat::Ident {
ident: "None",
},
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "None",
},
],
},
},
},
},
None,
),
]
"#);
}
#[test]
fn test_let_dot_dot() {
let tokens = quote! {
let .. = 10;
};
snapshot!(tokens as Stmt, @r#"
Stmt::Local {
pat: Pat::Rest,
init: Some(LocalInit {
expr: Expr::Lit {
lit: 10,
},
}),
}
"#);
}
#[test]
fn test_let_else() {
let tokens = quote! {
let Some(x) = None else { return 0; };
};
snapshot!(tokens as Stmt, @r#"
Stmt::Local {
pat: Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
},
],
},
elems: [
Pat::Ident {
ident: "x",
},
],
},
init: Some(LocalInit {
expr: Expr::Path {
path: Path {
segments: [
PathSegment {
ident: "None",
},
],
},
},
diverge: Some(Expr::Block {
block: Block {
stmts: [
Stmt::Expr(
Expr::Return {
expr: Some(Expr::Lit {
lit: 0,
}),
},
Some,
),
],
},
}),
}),
}
"#);
}
#[test]
fn test_macros() {
let tokens = quote! {
fn main() {
macro_rules! mac {}
thread_local! { static FOO }
println!("");
vec![]
}
};
snapshot!(tokens as Stmt, @r#"
Stmt::Item(Item::Fn {
vis: Visibility::Inherited,
sig: Signature {
ident: "main",
generics: Generics,
output: ReturnType::Default,
},
block: Block {
stmts: [
Stmt::Item(Item::Macro {
ident: Some("mac"),
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "macro_rules",
},
],
},
delimiter: MacroDelimiter::Brace,
tokens: TokenStream(``),
},
}),
Stmt::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "thread_local",
},
],
},
delimiter: MacroDelimiter::Brace,
tokens: TokenStream(`static FOO`),
},
},
Stmt::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "println",
},
],
},
delimiter: MacroDelimiter::Paren,
tokens: TokenStream(`""`),
},
semi_token: Some,
},
Stmt::Expr(
Expr::Macro {
mac: Macro {
path: Path {
segments: [
PathSegment {
ident: "vec",
},
],
},
delimiter: MacroDelimiter::Bracket,
tokens: TokenStream(``),
},
},
None,
),
],
},
})
"#);
}
#[test]
fn test_early_parse_loop() {
// The following is an Expr::Loop followed by Expr::Tuple. It is not an
// Expr::Call.
let tokens = quote! {
loop {}
()
};
let stmts = Block::parse_within.parse2(tokens).unwrap();
snapshot!(stmts, @r#"
[
Stmt::Expr(
Expr::Loop {
body: Block {
stmts: [],
},
},
None,
),
Stmt::Expr(
Expr::Tuple,
None,
),
]
"#);
let tokens = quote! {
'a: loop {}
()
};
let stmts = Block::parse_within.parse2(tokens).unwrap();
snapshot!(stmts, @r#"
[
Stmt::Expr(
Expr::Loop {
label: Some(Label {
name: Lifetime {
ident: "a",
},
}),
body: Block {
stmts: [],
},
},
None,
),
Stmt::Expr(
Expr::Tuple,
None,
),
]
"#);
}

38
vendor/syn/tests/test_token_trees.rs vendored Normal file

@@ -0,0 +1,38 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::TokenStream;
use quote::quote;
use syn::Lit;
#[test]
fn test_struct() {
let input = "
#[derive(Debug, Clone)]
pub struct Item {
pub ident: Ident,
pub attrs: Vec<Attribute>,
}
";
snapshot!(input as TokenStream, @r##"
TokenStream(
`# [derive (Debug , Clone)] pub struct Item { pub ident : Ident , pub attrs : Vec < Attribute >, }`,
)
"##);
}
#[test]
fn test_literal_mangling() {
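// The underscore in `0_4` must survive a parse/print round trip unchanged.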
let code = "0_4";
let parsed: Lit = syn::parse_str(code).unwrap();
assert_eq!(code, quote!(#parsed).to_string());
}

471
vendor/syn/tests/test_ty.rs vendored Normal file

@@ -0,0 +1,471 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::{quote, ToTokens as _};
use syn::punctuated::Punctuated;
use syn::{parse_quote, token, Token, Type, TypeTuple};
#[test]
fn test_mut_self() {
syn::parse_str::<Type>("fn(mut self)").unwrap();
syn::parse_str::<Type>("fn(mut self,)").unwrap();
syn::parse_str::<Type>("fn(mut self: ())").unwrap();
syn::parse_str::<Type>("fn(mut self: ...)").unwrap_err();
syn::parse_str::<Type>("fn(mut self: mut self)").unwrap_err();
syn::parse_str::<Type>("fn(mut self::T)").unwrap_err();
}
#[test]
fn test_macro_variable_type() {
// mimics the token stream corresponding to `$ty<T>`
let tokens = TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r#"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
}),
],
},
},
],
},
}
"#);
// mimics the token stream corresponding to `$ty::<T>`
let tokens = TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Ident(Ident::new("T", Span::call_site())),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r#"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "ty",
arguments: PathArguments::AngleBracketed {
colon2_token: Some,
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
}),
],
},
},
],
},
}
"#);
}
#[test]
fn test_group_angle_brackets() {
// mimics the token stream corresponding to `Option<$ty>`
let tokens = TokenStream::from_iter([
TokenTree::Ident(Ident::new("Option", Span::call_site())),
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
]);
snapshot!(tokens as Type, @r#"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Option",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
},
],
},
}),
],
},
},
],
},
},
}),
],
},
},
],
},
}
"#);
}
#[test]
fn test_group_colons() {
// mimics the token stream corresponding to `$ty::Item`
let tokens = TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("Item", Span::call_site())),
]);
snapshot!(tokens as Type, @r#"
Type::Path {
path: Path {
segments: [
PathSegment {
ident: "Vec",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Type(Type::Path {
path: Path {
segments: [
PathSegment {
ident: "u8",
},
],
},
}),
],
},
},
Token![::],
PathSegment {
ident: "Item",
},
],
},
}
"#);
let tokens = TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("Element", Span::call_site())),
]);
snapshot!(tokens as Type, @r#"
Type::Path {
qself: Some(QSelf {
ty: Type::Slice {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "T",
},
],
},
},
},
position: 0,
}),
path: Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "Element",
},
],
},
}
"#);
}
#[test]
fn test_trait_object() {
let tokens = quote!(dyn for<'a> Trait<'a> + 'static);
snapshot!(tokens as Type, @r#"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Trait(TraitBound {
lifetimes: Some(BoundLifetimes {
lifetimes: [
GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime {
ident: "a",
},
}),
],
}),
path: Path {
segments: [
PathSegment {
ident: "Trait",
arguments: PathArguments::AngleBracketed {
args: [
GenericArgument::Lifetime(Lifetime {
ident: "a",
}),
],
},
},
],
},
}),
Token![+],
TypeParamBound::Lifetime {
ident: "static",
},
],
}
"#);
let tokens = quote!(dyn 'a + Trait);
snapshot!(tokens as Type, @r#"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Lifetime {
ident: "a",
},
Token![+],
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
],
}
"#);
// None of the following are valid Rust types.
syn::parse_str::<Type>("for<'a> dyn Trait<'a>").unwrap_err();
syn::parse_str::<Type>("dyn for<'a> 'a + Trait").unwrap_err();
}
#[test]
fn test_trailing_plus() {
#[rustfmt::skip]
let tokens = quote!(impl Trait +);
snapshot!(tokens as Type, @r#"
Type::ImplTrait {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
Token![+],
],
}
"#);
#[rustfmt::skip]
let tokens = quote!(dyn Trait +);
snapshot!(tokens as Type, @r#"
Type::TraitObject {
dyn_token: Some,
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
Token![+],
],
}
"#);
#[rustfmt::skip]
let tokens = quote!(Trait +);
snapshot!(tokens as Type, @r#"
Type::TraitObject {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Trait",
},
],
},
}),
Token![+],
],
}
"#);
}
#[test]
fn test_tuple_comma() {
let mut expr = TypeTuple {
paren_token: token::Paren::default(),
elems: Punctuated::new(),
};
snapshot!(expr.to_token_stream() as Type, @"Type::Tuple");
expr.elems.push_value(parse_quote!(_));
// Must not parse to Type::Paren
snapshot!(expr.to_token_stream() as Type, @r#"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
],
}
"#);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Type, @r#"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
],
}
"#);
expr.elems.push_value(parse_quote!(_));
snapshot!(expr.to_token_stream() as Type, @r#"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
Type::Infer,
],
}
"#);
expr.elems.push_punct(<Token![,]>::default());
snapshot!(expr.to_token_stream() as Type, @r#"
Type::Tuple {
elems: [
Type::Infer,
Token![,],
Type::Infer,
Token![,],
],
}
"#);
}
#[test]
fn test_impl_trait_use() {
let tokens = quote! {
impl Sized + use<'_, 'a, A, Test>
};
snapshot!(tokens as Type, @r#"
Type::ImplTrait {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
Token![+],
TypeParamBound::PreciseCapture(PreciseCapture {
params: [
CapturedParam::Lifetime(Lifetime {
ident: "_",
}),
Token![,],
CapturedParam::Lifetime(Lifetime {
ident: "a",
}),
Token![,],
CapturedParam::Ident("A"),
Token![,],
CapturedParam::Ident("Test"),
],
}),
],
}
"#);
let trailing = quote! {
impl Sized + use<'_,>
};
snapshot!(trailing as Type, @r#"
Type::ImplTrait {
bounds: [
TypeParamBound::Trait(TraitBound {
path: Path {
segments: [
PathSegment {
ident: "Sized",
},
],
},
}),
Token![+],
TypeParamBound::PreciseCapture(PreciseCapture {
params: [
CapturedParam::Lifetime(Lifetime {
ident: "_",
}),
Token![,],
],
}),
],
}
"#);
}

70
vendor/syn/tests/test_unparenthesize.rs vendored Normal file

@@ -0,0 +1,70 @@
#![cfg(not(miri))]
#![recursion_limit = "1024"]
#![feature(rustc_private)]
#![allow(
clippy::elidable_lifetime_names,
clippy::manual_assert,
clippy::match_like_matches_macro,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
use crate::common::visit::{AsIfPrinted, FlattenParens};
use quote::ToTokens as _;
use std::fs;
use std::panic;
use std::path::Path;
use std::sync::atomic::{AtomicUsize, Ordering};
use syn::visit_mut::VisitMut as _;
#[macro_use]
mod macros;
mod common;
mod repo;
#[test]
fn test_unparenthesize() {
repo::rayon_init();
repo::clone_rust();
let failed = AtomicUsize::new(0);
repo::for_each_rust_file(|path| test(path, &failed));
let failed = failed.into_inner();
if failed > 0 {
panic!("{} failures", failed);
}
}
fn test(path: &Path, failed: &AtomicUsize) {
let content = fs::read_to_string(path).unwrap();
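// Round trip: parse, flatten parentheses, print, reparse, and flatten again;
// after normalizing the original with AsIfPrinted, the two syntax trees must match.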
match panic::catch_unwind(|| -> syn::Result<()> {
let mut before = syn::parse_file(&content)?;
FlattenParens::discard_attrs().visit_file_mut(&mut before);
let printed = before.to_token_stream();
let mut after = syn::parse2::<syn::File>(printed.clone())?;
FlattenParens::discard_attrs().visit_file_mut(&mut after);
// Normalize features that we expect Syn not to print.
AsIfPrinted.visit_file_mut(&mut before);
if before != after {
errorf!("=== {}\n", path.display());
if failed.fetch_add(1, Ordering::Relaxed) == 0 {
errorf!("BEFORE:\n{:#?}\nAFTER:\n{:#?}\n", before, after);
}
}
Ok(())
}) {
Err(_) => {
errorf!("=== {}: syn panic\n", path.display());
failed.fetch_add(1, Ordering::Relaxed);
}
Ok(Err(msg)) => {
errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
failed.fetch_add(1, Ordering::Relaxed);
}
Ok(Ok(())) => {}
}
}

191
vendor/syn/tests/test_visibility.rs vendored Normal file

@@ -0,0 +1,191 @@
#![allow(
clippy::elidable_lifetime_names,
clippy::needless_lifetimes,
clippy::uninlined_format_args
)]
#[macro_use]
mod snapshot;
mod debug;
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use syn::parse::{Parse, ParseStream};
use syn::{DeriveInput, Result, Visibility};
#[derive(Debug)]
struct VisRest {
vis: Visibility,
rest: TokenStream,
}
impl Parse for VisRest {
fn parse(input: ParseStream) -> Result<Self> {
Ok(VisRest {
vis: input.parse()?,
rest: input.parse()?,
})
}
}
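// assert_vis_parse!(input, Ok(pattern) [+ rest]) checks that input parses as a
// Visibility matching the pattern with exactly `rest` left over; the Err form
// expects parsing to fail.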
macro_rules! assert_vis_parse {
($input:expr, Ok($p:pat)) => {
assert_vis_parse!($input, Ok($p) + "");
};
($input:expr, Ok($p:pat) + $rest:expr) => {
let expected = $rest.parse::<TokenStream>().unwrap();
let parse: VisRest = syn::parse_str($input).unwrap();
match parse.vis {
$p => {}
_ => panic!("expected {}, got {:?}", stringify!($p), parse.vis),
}
// NOTE: Round-trips through `to_string` to avoid potential whitespace
// diffs.
assert_eq!(parse.rest.to_string(), expected.to_string());
};
($input:expr, Err) => {
syn::parse2::<VisRest>($input.parse().unwrap()).unwrap_err();
};
}
#[test]
fn test_pub() {
assert_vis_parse!("pub", Ok(Visibility::Public(_)));
}
#[test]
fn test_inherited() {
assert_vis_parse!("", Ok(Visibility::Inherited));
}
#[test]
fn test_in() {
assert_vis_parse!("pub(in foo::bar)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_crate() {
assert_vis_parse!("pub(crate)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_self() {
assert_vis_parse!("pub(self)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_pub_super() {
assert_vis_parse!("pub(super)", Ok(Visibility::Restricted(_)));
}
#[test]
fn test_missing_in() {
assert_vis_parse!("pub(foo::bar)", Ok(Visibility::Public(_)) + "(foo::bar)");
}
#[test]
fn test_missing_in_path() {
assert_vis_parse!("pub(in)", Err);
}
#[test]
fn test_crate_path() {
assert_vis_parse!(
"pub(crate::A, crate::B)",
Ok(Visibility::Public(_)) + "(crate::A, crate::B)"
);
}
#[test]
fn test_junk_after_in() {
assert_vis_parse!("pub(in some::path @@garbage)", Err);
}
#[test]
fn test_inherited_vis_named_field() {
// mimics `struct S { $vis $field: () }` where $vis is empty
let tokens = TokenStream::from_iter([
TokenTree::Ident(Ident::new("struct", Span::call_site())),
TokenTree::Ident(Ident::new("S", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::None, quote!(f))),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
]),
)),
]);
snapshot!(tokens as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Named {
named: [
Field {
vis: Visibility::Inherited,
ident: Some("f"),
colon_token: Some,
ty: Type::Tuple,
},
],
},
},
}
"#);
}
#[test]
fn test_inherited_vis_unnamed_field() {
// mimics `struct S($vis $ty);` where $vis is empty
let tokens = TokenStream::from_iter([
TokenTree::Ident(Ident::new("struct", Span::call_site())),
TokenTree::Ident(Ident::new("S", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter([
TokenTree::Group(Group::new(Delimiter::None, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::None, quote!(str))),
]),
)),
TokenTree::Punct(Punct::new(';', Spacing::Alone)),
]);
snapshot!(tokens as DeriveInput, @r#"
DeriveInput {
vis: Visibility::Inherited,
ident: "S",
generics: Generics,
data: Data::Struct {
fields: Fields::Unnamed {
unnamed: [
Field {
vis: Visibility::Inherited,
ty: Type::Group {
elem: Type::Path {
path: Path {
segments: [
PathSegment {
ident: "str",
},
],
},
},
},
},
],
},
semi_token: Some,
},
}
"#);
}

33
vendor/syn/tests/zzz_stable.rs vendored Normal file

@@ -0,0 +1,33 @@
#![cfg(syn_disable_nightly_tests)]
use std::io::{self, Write};
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
const MSG: &str = "\
‖ WARNING:
‖ This is not a nightly compiler so not all tests were able to
‖ run. Syn includes tests that compare Syn's parser against the
‖ compiler's parser, which requires access to unstable librustc
‖ data structures and a nightly compiler.
";
#[test]
fn notice() -> io::Result<()> {
let header = "WARNING";
let index_of_header = MSG.find(header).unwrap();
let before = &MSG[..index_of_header];
let after = &MSG[index_of_header + header.len()..];
let mut stderr = StandardStream::stderr(ColorChoice::Auto);
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", before)?;
stderr.set_color(ColorSpec::new().set_bold(true).set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", header)?;
stderr.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(&mut stderr, "{}", after)?;
stderr.reset()?;
Ok(())
}