macros: Upgrade syn to 2.0

Jonas Platte 2023-03-19 18:26:41 +01:00
parent fd895fab40
commit 53622f8ce0
12 changed files with 47 additions and 46 deletions

View File

@@ -21,5 +21,5 @@ proc-macro2 = "1.0.24"
 quote = "1.0.8"
 ruma-identifiers-validation = { workspace = true }
 serde = { workspace = true }
-syn = { version = "1.0.57", features = ["extra-traits", "full", "visit"] }
+syn = { version = "2.0.2", features = ["extra-traits", "full", "visit"] }
 toml = { version = "0.7.1", default-features = false, features = ["parse"] }

View File

@@ -68,8 +68,7 @@ impl Parse for Metadata {
         let field_values;
         braced!(field_values in input);
-        let field_values =
-            field_values.parse_terminated::<FieldValue, Token![,]>(FieldValue::parse)?;
+        let field_values = field_values.parse_terminated(FieldValue::parse, Token![,])?;
         let mut description = None;
         let mut method = None;
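
Background for the hunk above: syn 2.0 changed `ParseBuffer::parse_terminated` to take the separator token as a second argument instead of a pair of turbofish type parameters. A minimal sketch of the new call shape, assuming syn 2.x with the "full" feature enabled; the `Example` type and its field are hypothetical and only illustrate the API:

    use syn::{
        braced,
        parse::{Parse, ParseStream},
        punctuated::Punctuated,
        FieldValue, Token,
    };

    // Hypothetical wrapper type, used only to show the syn 2.0 call shape.
    struct Example {
        field_values: Punctuated<FieldValue, Token![,]>,
    }

    impl Parse for Example {
        fn parse(input: ParseStream) -> syn::Result<Self> {
            let content;
            braced!(content in input);
            // syn 2.0: pass the element parser and the separator token as arguments,
            // replacing the old `parse_terminated::<FieldValue, Token![,]>(..)` form.
            let field_values = content.parse_terminated(FieldValue::parse, Token![,])?;
            Ok(Self { field_values })
        }
    }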

View File

@@ -55,7 +55,7 @@ pub fn expand_derive_request(input: DeriveInput) -> syn::Result<TokenStream> {
     let mut error_ty = None;
     for attr in input.attrs {
-        if !attr.path.is_ident("ruma_api") {
+        if !attr.path().is_ident("ruma_api") {
             continue;
         }
@@ -378,7 +378,7 @@ impl TryFrom<Field> for RequestField {
     fn try_from(mut field: Field) -> syn::Result<Self> {
         let (mut api_attrs, attrs) =
-            field.attrs.into_iter().partition::<Vec<_>, _>(|attr| attr.path.is_ident("ruma_api"));
+            field.attrs.into_iter().partition::<Vec<_>, _>(|attr| attr.path().is_ident("ruma_api"));
         field.attrs = attrs;
         let kind_attr = match api_attrs.as_slice() {
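
The recurring change in this and the remaining files is the syn 2.0 attribute API: `Attribute` no longer exposes a public `path` field, since attribute contents now live in the `attr.meta` field, and the path is reached through the `path()` accessor instead. A small sketch under that assumption; the `is_ruma_api` helper is illustrative and not part of the crate:

    use syn::Attribute;

    // Illustrative helper: detects `#[ruma_api(...)]` attributes.
    // syn 1.0: `attr.path.is_ident("ruma_api")` read a public field.
    // syn 2.0: `attr.path()` borrows the path out of `attr.meta`.
    fn is_ruma_api(attr: &Attribute) -> bool {
        attr.path().is_ident("ruma_api")
    }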

View File

@@ -33,7 +33,7 @@ impl Request {
         let (parse_query, query_vars) = if let Some(field) = self.query_map_field() {
             let cfg_attrs =
-                field.attrs.iter().filter(|a| a.path.is_ident("cfg")).collect::<Vec<_>>();
+                field.attrs.iter().filter(|a| a.path().is_ident("cfg")).collect::<Vec<_>>();
             let field_name = field.ident.as_ref().expect("expected field to have an identifier");
             let parse = quote! {
                 #( #cfg_attrs )*
@@ -71,7 +71,7 @@ impl Request {
             .header_fields()
             .map(|(field, header_name)| {
                 let cfg_attrs =
-                    field.attrs.iter().filter(|a| a.path.is_ident("cfg")).collect::<Vec<_>>();
+                    field.attrs.iter().filter(|a| a.path().is_ident("cfg")).collect::<Vec<_>>();
                 let field_name = &field.ident;
                 let header_name_string = header_name.to_string();
@@ -211,7 +211,7 @@ fn vars<'a>(
         .map(|field| {
             let field_name = field.ident.as_ref().expect("expected field to have an identifier");
             let cfg_attrs =
-                field.attrs.iter().filter(|a| a.path.is_ident("cfg")).collect::<Vec<_>>();
+                field.attrs.iter().filter(|a| a.path().is_ident("cfg")).collect::<Vec<_>>();
             let decl = quote! {
                 #( #cfg_attrs )*

View File

@@ -165,7 +165,7 @@ fn struct_init_fields<'a>(
         .map(|field| {
             let field_name = field.ident.as_ref().expect("expected field to have an identifier");
             let cfg_attrs =
-                field.attrs.iter().filter(|a| a.path.is_ident("cfg")).collect::<Vec<_>>();
+                field.attrs.iter().filter(|a| a.path().is_ident("cfg")).collect::<Vec<_>>();
             quote! {
                 #( #cfg_attrs )*

View File

@@ -61,7 +61,7 @@ pub fn expand_derive_response(input: DeriveInput) -> syn::Result<TokenStream> {
     let mut manual_body_serde = false;
     let mut error_ty = None;
     for attr in input.attrs {
-        if !attr.path.is_ident("ruma_api") {
+        if !attr.path().is_ident("ruma_api") {
             continue;
         }
@@ -262,7 +262,7 @@ impl TryFrom<Field> for ResponseField {
         }
         let (mut api_attrs, attrs) =
-            field.attrs.into_iter().partition::<Vec<_>, _>(|attr| attr.path.is_ident("ruma_api"));
+            field.attrs.into_iter().partition::<Vec<_>, _>(|attr| attr.path().is_ident("ruma_api"));
         field.attrs = attrs;
         let kind_attr = match api_attrs.as_slice() {

View File

@@ -42,7 +42,7 @@ impl Response {
             let field_name =
                 field.ident.as_ref().expect("expected field to have an identifier");
             let cfg_attrs =
-                field.attrs.iter().filter(|a| a.path.is_ident("cfg")).collect::<Vec<_>>();
+                field.attrs.iter().filter(|a| a.path().is_ident("cfg")).collect::<Vec<_>>();
             fields.push(match &response_field.kind {
                 ResponseFieldKind::Body | ResponseFieldKind::NewtypeBody => {

View File

@@ -46,7 +46,7 @@ impl Response {
         response_field.as_body_field().map(|field| {
             let field_name =
                 field.ident.as_ref().expect("expected field to have an identifier");
-            let cfg_attrs = field.attrs.iter().filter(|a| a.path.is_ident("cfg"));
+            let cfg_attrs = field.attrs.iter().filter(|a| a.path().is_ident("cfg"));
             quote! {
                 #( #cfg_attrs )*

View File

@@ -9,7 +9,7 @@ use syn::{
     parse::{Parse, ParseStream},
     parse_quote,
     punctuated::Punctuated,
-    DeriveInput, Field, Ident, LitStr, Meta, NestedMeta, Token, Type,
+    DeriveInput, Field, Ident, LitStr, Meta, Token, Type,
 };
 use crate::util::m_prefix_name_to_type_name;
@@ -277,7 +277,7 @@ pub fn expand_event_content(
     let content_meta = input
         .attrs
         .iter()
-        .filter(|attr| attr.path.is_ident("ruma_event"))
+        .filter(|attr| attr.path().is_ident("ruma_event"))
         .try_fold(ContentMeta::default(), |meta, attr| {
             let list: Punctuated<ContentMeta, Token![,]> =
                 attr.parse_args_with(Punctuated::parse_terminated)?;
@@ -404,7 +404,7 @@ fn generate_redacted_event_content<'a>(
             .attrs
             .iter()
             .map(|a| -> syn::Result<_> {
-                if a.path.is_ident("ruma_event") {
+                if a.path().is_ident("ruma_event") {
                     if let EventFieldMeta::SkipRedaction = a.parse_args()? {
                         keep_field = true;
                     }
@@ -526,7 +526,7 @@ fn generate_possibly_redacted_event_content<'a>(
            .attrs
            .iter()
            .map(|a| -> syn::Result<_> {
-                if a.path.is_ident("ruma_event") {
+                if a.path().is_ident("ruma_event") {
                    // Keep the field if it is not redacted.
                    if let EventFieldMeta::SkipRedaction = a.parse_args()? {
                        keep_field = true;
@@ -535,27 +535,29 @@ fn generate_possibly_redacted_event_content<'a>(
                    // Don't re-emit our `ruma_event` attributes.
                    Ok(None)
                } else {
-                    if a.path.is_ident("serde") {
-                        let serde_meta = a.parse_meta()?;
-                        if let Meta::List(list) = serde_meta {
-                            for meta in list.nested.iter().filter_map(|nested_meta| match nested_meta {
-                                NestedMeta::Meta(meta) => Some(meta),
-                                NestedMeta::Lit(_) => None,
-                            }) {
+                    if a.path().is_ident("serde") {
+                        if let Meta::List(list) = &a.meta {
+                            let nested: Punctuated<Meta, Token![,]> =
+                                list.parse_args_with(Punctuated::parse_terminated)?;
+                            for meta in &nested {
                                if meta.path().is_ident("default") {
                                    // Keep the field if it deserializes to its default value.
                                    keep_field = true;
-                                } else if !meta.path().is_ident("rename") && !meta.path().is_ident("alias") && unsupported_serde_attribute.is_none() {
-                                    // Error if the field is not kept and uses an unsupported serde attribute.
-                                    unsupported_serde_attribute = Some(
-                                        syn::Error::new_spanned(
+                                } else if !meta.path().is_ident("rename")
+                                    && !meta.path().is_ident("alias")
+                                    && unsupported_serde_attribute.is_none()
+                                {
+                                    // Error if the field is not kept and uses an unsupported
+                                    // serde attribute.
+                                    unsupported_serde_attribute =
+                                        Some(syn::Error::new_spanned(
                                            meta,
-                                            "Can't generate PossiblyRedacted struct with unsupported serde attribute\n\
+                                            "Can't generate PossiblyRedacted struct with \
+                                             unsupported serde attribute\n\
                                              Expected one of `default`, `rename` or `alias`\n\
-                                             Use the `custom_possibly_redacted` attribute and create the struct manually"
-                                        )
-                                    );
+                                             Use the `custom_possibly_redacted` attribute \
+                                             and create the struct manually",
+                                        ));
                                }
                            }
                        }
@@ -584,7 +586,7 @@ fn generate_possibly_redacted_event_content<'a>(
            field_changed = true;
            let old_type = &f.ty;
-            let ty = parse_quote!{ Option<#old_type> };
+            let ty = parse_quote! { Option<#old_type> };
            attrs.push(parse_quote! { #[serde(skip_serializing_if = "Option::is_none")] });
            Ok(Field { attrs, ty, ..f.clone() })
@@ -805,12 +807,12 @@ fn generate_event_content_impl<'a>(
        .map(|type_prefix| {
            let type_fragment_field = fields
                .find_map(|f| {
-                    f.attrs.iter().filter(|a| a.path.is_ident("ruma_event")).find_map(|a| {
-                        match a.parse_args() {
-                            Ok(EventFieldMeta::TypeFragment) => Some(Ok(f)),
-                            Ok(_) => None,
-                            Err(e) => Some(Err(e)),
-                        }
+                    f.attrs.iter().filter(|a| a.path().is_ident("ruma_event")).find_map(|a| match a
+                        .parse_args()
+                    {
+                        Ok(EventFieldMeta::TypeFragment) => Some(Ok(f)),
+                        Ok(_) => None,
+                        Err(e) => Some(Err(e)),
                    })
                })
                .transpose()?
@@ -923,7 +925,7 @@ fn generate_event_content_impl<'a>(
    };
    let fields_without_type_fragment = fields.filter(|f| {
        !f.attrs.iter().any(|a| {
-            a.path.is_ident("ruma_event") && matches!(a.parse_args(), Ok(EventFieldMeta::TypeFragment))
+            a.path().is_ident("ruma_event") && matches!(a.parse_args(), Ok(EventFieldMeta::TypeFragment))
        })
    }).collect::<Vec<_>>();
    let fields_ident_without_type_fragment = fields_without_type_fragment.iter().filter_map(|f| f.ident.as_ref());
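
The larger rewrite in the hunk above (@@ -535,27 +535,29 @@) reflects that syn 2.0 removed `Attribute::parse_meta` and the `NestedMeta` type. The nested arguments of a list attribute such as `#[serde(default, rename = "...")]` are now obtained by matching on `attr.meta` and re-parsing the list's tokens, for example with `Punctuated::<Meta, Token![,]>::parse_terminated` as this commit does. A rough standalone sketch of that pattern; the `nested_meta_paths` helper and its return shape are illustrative only:

    use syn::{punctuated::Punctuated, Attribute, Meta, Token};

    // Illustrative helper: collects the paths mentioned inside a list attribute,
    // e.g. `default` and `rename` for `#[serde(default, rename = "x")]`.
    fn nested_meta_paths(attr: &Attribute) -> syn::Result<Vec<String>> {
        let mut paths = Vec::new();
        // syn 2.0: `attr.meta` replaces the old `attr.parse_meta()?`, and the
        // nested items are re-parsed from the list's tokens instead of being
        // handed out as `NestedMeta` values.
        if let Meta::List(list) = &attr.meta {
            let nested: Punctuated<Meta, Token![,]> =
                list.parse_args_with(Punctuated::parse_terminated)?;
            for meta in &nested {
                if let Some(ident) = meta.path().get_ident() {
                    paths.push(ident.to_string());
                }
            }
        }
        Ok(paths)
    }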

View File

@@ -235,7 +235,7 @@ impl Parse for EventEnumEntry {
         let (ruma_enum_attrs, attrs) = input
             .call(Attribute::parse_outer)?
             .into_iter()
-            .partition::<Vec<_>, _>(|attr| attr.path.is_ident("ruma_enum"));
+            .partition::<Vec<_>, _>(|attr| attr.path().is_ident("ruma_enum"));
         let ev_type: LitStr = input.parse()?;
         let _: Token![=>] = input.parse()?;
         let ev_path = input.call(Path::parse_mod_style)?;
@@ -296,7 +296,7 @@ impl Parse for EventEnumInput {
             let content;
             braced!(content in input);
-            let events = content.parse_terminated::<_, Token![,]>(EventEnumEntry::parse)?;
+            let events = content.parse_terminated(EventEnumEntry::parse, Token![,])?;
             let events = events.into_iter().collect();
             enums.push(EventEnumDecl { attrs, kind, events });
         }

View File

@@ -29,7 +29,7 @@ pub fn expand_id_zst(input: ItemStruct) -> syn::Result<TokenStream> {
     let owned_decl = expand_owned_id(&input);
-    let meta = input.attrs.iter().filter(|attr| attr.path.is_ident("ruma_id")).try_fold(
+    let meta = input.attrs.iter().filter(|attr| attr.path().is_ident("ruma_id")).try_fold(
         IdZstMeta::default(),
         |meta, attr| {
             let list: Punctuated<IdZstMeta, Token![,]> =

View File

@@ -10,7 +10,7 @@ pub fn get_rename_rule(input: &ItemEnum) -> syn::Result<RenameRule> {
     let rules: Vec<_> = input
         .attrs
         .iter()
-        .filter(|attr| attr.path.is_ident("ruma_enum"))
+        .filter(|attr| attr.path().is_ident("ruma_enum"))
         .map(|attr| attr.parse_args::<RenameAllAttr>().map(RenameAllAttr::into_inner))
         .collect::<syn::Result<_>>()?;
@@ -28,7 +28,7 @@ pub fn get_enum_attributes(input: &Variant) -> syn::Result<EnumAttrs> {
     let mut attributes = EnumAttrs::default();
     for attr in &input.attrs {
-        if !attr.path.is_ident("ruma_enum") {
+        if !attr.path().is_ident("ruma_enum") {
             continue;
         }