Add macro-braces, disallowed-type and import-rename clippy lints

Devin Ragotzy 2021-07-22 08:34:30 -04:00 committed by GitHub
parent 2e88e48eb3
commit 3101be1f99
8 changed files with 50 additions and 22 deletions


@@ -9,14 +9,17 @@ ruma-clippy = """\
     -W clippy::branches_sharing_code \
     -W clippy::cloned_instead_of_copied \
     -W clippy::dbg_macro \
+    -W clippy::disallowed_type \
     -W clippy::empty_line_after_outer_attr \
     -W clippy::exhaustive_structs \
     -W clippy::inefficient_to_string \
     -W clippy::macro_use_imports \
     -W clippy::map_flatten \
+    -W clippy::missing_enforced_import_renames \
     -W clippy::mut_mut \
     -W clippy::needless_borrow \
     -A clippy::new_without_default \
+    -W clippy::nonstandard_macro_braces \
     -W clippy::str_to_string \
     -W clippy::todo \
     -W clippy::unreadable_literal \

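As a rough illustration (not part of the commit), this is the kind of code the three newly enabled warnings point at, given the clippy.toml configuration added below; the type, import, and function names here are made up:

    // Hypothetical snippet, not from the ruma tree.
    use std::collections::HashMap; // clippy::disallowed_type: HashMap is listed in disallowed-types
    use serde_json::Value; // clippy::missing_enforced_import_renames: the config expects `as JsonValue`
    use quote::quote;

    fn flagged() -> proc_macro2::TokenStream {
        let _scores: HashMap<String, u64> = HashMap::new();
        let _json = Value::Null;
        // clippy::nonstandard_macro_braces: standard-macro-braces pins `quote!` to curly braces,
        // so the parentheses here are warned about.
        quote!(struct Example;)
    }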

@@ -1 +1,12 @@
 msrv = "1.50"
+disallowed-types = [
+    "std::collections::HashMap",
+    "std::collections::HashSet",
+]
+enforced-import-renames = [
+    { path = "serde_json::Value", rename = "JsonValue" },
+]
+standard-macro-braces = [
+    { name = "quote", brace = "{" },
+    { name = "quote::quote", brace = "{" },
+]

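For contrast, a sketch of the conforming counterpart under the configuration above, again with made-up names:

    use std::collections::{BTreeMap, BTreeSet}; // ordered alternatives; not in disallowed-types
    use serde_json::Value as JsonValue; // satisfies the enforced import rename
    use quote::quote;

    fn conforming() -> proc_macro2::TokenStream {
        let _tags: BTreeSet<String> = BTreeSet::new();
        let _meta: BTreeMap<String, JsonValue> = BTreeMap::new();
        // `quote!` invoked with the braces configured in standard-macro-braces
        quote! { struct Example; }
    }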

@@ -152,7 +152,7 @@ impl Request {
         let (derive_deserialize, lifetimes) = if self.has_body_lifetimes() {
             (TokenStream::new(), self.body_lifetimes())
         } else {
-            (quote!(#serde::Deserialize), TokenStream::new())
+            (quote! { #serde::Deserialize }, TokenStream::new())
         };

         Some((derive_deserialize, quote! { #lifetimes (#field); }))
@@ -161,7 +161,7 @@ impl Request {
         let (derive_deserialize, lifetimes) = if self.has_body_lifetimes() {
             (TokenStream::new(), self.body_lifetimes())
         } else {
-            (quote!(#serde::Deserialize), TokenStream::new())
+            (quote! { #serde::Deserialize }, TokenStream::new())
         };

         let fields = fields.map(RequestField::field);
@@ -187,7 +187,7 @@ impl Request {
         let (derive_deserialize, lifetime) = if self.has_query_lifetimes() {
             (TokenStream::new(), self.query_lifetimes())
         } else {
-            (quote!(#serde::Deserialize), TokenStream::new())
+            (quote! { #serde::Deserialize }, TokenStream::new())
         };

         quote! {
@@ -205,7 +205,7 @@ impl Request {
         let (derive_deserialize, lifetime) = if self.has_query_lifetimes() {
             (TokenStream::new(), self.query_lifetimes())
         } else {
-            (quote!(#serde::Deserialize), TokenStream::new())
+            (quote! { #serde::Deserialize }, TokenStream::new())
         };

         quote! {


@@ -18,8 +18,11 @@ impl Request {
         let method = &metadata.method;

-        let incoming_request_type =
-            if self.contains_lifetimes() { quote!(IncomingRequest) } else { quote!(Request) };
+        let incoming_request_type = if self.contains_lifetimes() {
+            quote! { IncomingRequest }
+        } else {
+            quote! { Request }
+        };

         // FIXME: the rest of the field initializer expansions are gated `cfg(...)`
         // except this one. If we get errors about missing fields in IncomingRequest for
@@ -86,7 +89,7 @@ impl Request {
                 },
             )
         } else if self.has_query_fields() {
-            let (decls, names) = self.vars(RequestFieldKind::Query, quote!(request_query));
+            let (decls, names) = self.vars(RequestFieldKind::Query, quote! { request_query });

             let parse = quote! {
                 let request_query: <RequestQuery as #ruma_serde::Outgoing>::Incoming =
@@ -212,7 +215,7 @@ impl Request {
             (parse, quote! { #field_name, })
         } else {
-            self.vars(RequestFieldKind::Body, quote!(request_body))
+            self.vars(RequestFieldKind::Body, quote! { request_body })
         };

         let non_auth_impls = metadata.authentication.iter().filter_map(|auth| {


@@ -55,7 +55,7 @@ impl Request {
         let request_query_string = if let Some(field) = self.query_map_field() {
             let field_name = field.ident.as_ref().expect("expected field to have identifier");

-            quote!({
+            quote! {{
                 // This function exists so that the compiler will throw an error when the type of
                 // the field with the query_map attribute doesn't implement
                 // `IntoIterator<Item = (String, String)>`.
@@ -80,12 +80,12 @@ impl Request {
                     "?{}",
                     #ruma_serde::urlencoded::to_string(request_query)?
                 )
-            })
+            }}
         } else if self.has_query_fields() {
             let request_query_init_fields =
-                self.struct_init_fields(RequestFieldKind::Query, quote!(self));
+                self.struct_init_fields(RequestFieldKind::Query, quote! { self });

-            quote!({
+            quote! {{
                 let request_query = RequestQuery {
                     #request_query_init_fields
                 };
@@ -94,7 +94,7 @@ impl Request {
                     "?{}",
                     #ruma_serde::urlencoded::to_string(request_query)?
                 )
-            })
+            }}
         } else {
             quote! { "" }
         };
@@ -174,7 +174,7 @@ impl Request {
                 field.ident.as_ref().expect("expected field to have an identifier");
             quote! { (self.#field_name) }
         } else {
-            let initializers = self.struct_init_fields(RequestFieldKind::Body, quote!(self));
+            let initializers = self.struct_init_fields(RequestFieldKind::Body, quote! { self });

             quote! { { #initializers } }
         };

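A note on the doubled braces above: the outer pair is only the quote! delimiter that standard-macro-braces standardizes on, while the inner pair is a literal block in the generated code. A minimal standalone sketch with illustrative names:

    use quote::quote;

    // Produces the tokens `{ let x = 1; x }`, i.e. a block expression, exactly as
    // `quote!({ let x = 1; x })` did before the brace change.
    fn block_expr() -> proc_macro2::TokenStream {
        quote! {{
            let x = 1;
            x
        }}
    }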

@@ -127,7 +127,7 @@ fn expand_any_with_deser(
         events.iter().map(|event| to_event_path(event, &event_struct, ruma_events)).collect();

     let variant_decls = variants.iter().map(|v| v.decl());
-    let self_variants = variants.iter().map(|v| v.ctor(quote!(Self)));
+    let self_variants = variants.iter().map(|v| v.ctor(quote! { Self }));

     let (custom_variant, custom_deserialize) =
         generate_custom_variant(&event_struct, var, ruma_events);
@@ -233,7 +233,7 @@ fn expand_conversion_impl(
             let sync_struct = kind.to_event_ident(&variation)?;

             let ident_variants = variants.iter().map(|v| v.match_arm(&ident));
-            let self_variants = variants.iter().map(|v| v.ctor(quote!(Self)));
+            let self_variants = variants.iter().map(|v| v.ctor(quote! { Self }));

             let redaction =
                 (*kind == EventKind::Message && *var == EventKindVariation::Full).then(|| {
@@ -270,7 +270,7 @@ fn expand_conversion_impl(
             };

             let full = kind.to_event_enum_ident(&variation)?;
-            let self_variants = variants.iter().map(|v| v.match_arm(quote!(Self)));
+            let self_variants = variants.iter().map(|v| v.match_arm(quote! { Self }));
             let full_variants = variants.iter().map(|v| v.ctor(&full));

             let redaction =
@@ -395,8 +395,8 @@ fn expand_content_enum(
         let attrs = &v.attrs;
         quote! { #(#attrs)* }
     });
-    let variant_arms = variants.iter().map(|v| v.match_arm(quote!(Self))).collect::<Vec<_>>();
-    let variant_ctors = variants.iter().map(|v| v.ctor(quote!(Self)));
+    let variant_arms = variants.iter().map(|v| v.match_arm(quote! { Self })).collect::<Vec<_>>();
+    let variant_ctors = variants.iter().map(|v| v.ctor(quote! { Self }));

     let event_content_impl = quote! {
         #[automatically_derived]
@@ -530,7 +530,7 @@ fn expand_redact(
         _ => return None,
     };

-    let self_variants = variants.iter().map(|v| v.match_arm(quote!(Self)));
+    let self_variants = variants.iter().map(|v| v.match_arm(quote! { Self }));
     let redaction_variants = variants.iter().map(|v| v.ctor(&redacted_enum));

     let fields = EVENT_FIELDS.iter().map(|(name, has_field)| {
@@ -738,7 +738,7 @@ fn accessor_methods(
     let content_enum = kind.to_content_enum();

-    let self_variants: Vec<_> = variants.iter().map(|v| v.match_arm(quote!(Self))).collect();
+    let self_variants: Vec<_> = variants.iter().map(|v| v.match_arm(quote! { Self })).collect();
     let content_variants: Vec<_> = variants.iter().map(|v| v.ctor(&content_enum)).collect();

     let event_type = quote! {
@@ -950,7 +950,7 @@ fn generate_accessor(
     let docs = format!("Returns this event's {} field.", name);
     let ident = Ident::new(name, Span::call_site());
     let field_type = field_return_type(name, var, ruma_events);
-    let variants = variants.iter().map(|v| v.match_arm(quote!(Self)));
+    let variants = variants.iter().map(|v| v.match_arm(quote! { Self }));

     quote! {
         #[doc = #docs]


@@ -958,6 +958,7 @@ fn get_plain_quote_fallback(original_message: &MessageEvent) -> String {
     }
 }

+#[allow(clippy::nonstandard_macro_braces)]
 fn get_html_quote_fallback(original_message: &MessageEvent) -> String {
     match &original_message.content.msgtype {
         MessageType::Audio(_) => {


@@ -0,0 +1,10 @@
+msrv = "1.50"
+disallowed-types = []
+enforced-import-renames = [ { path = "serde_json::Value", rename = "JsonValue" } ]
+standard-macro-braces = [
+    { name = "btreeset", brace = "[" },
+    # The macro calls itself like btreemap!(...) so this triggers for any use
+    # { name = "btreemap", brace = "{" },
+    { name = "hashset", brace = "[" },
+    { name = "hashmap", brace = "{" },
+]
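
The commented-out btreemap entry is skipped because the macro recursively calls itself with parentheses, so the lint would fire on every use; disallowed-types is presumably left empty so HashMap and HashSet stay usable in this crate. Invocations matching this brace configuration would look roughly like the following, assuming the maplit-style collection macros (the values are made up):

    use maplit::{hashmap, hashset};
    use std::collections::{HashMap, HashSet};

    fn collections() -> (HashSet<&'static str>, HashMap<&'static str, u32>) {
        // Square brackets for the set macro, curly braces for the map macro,
        // matching the standard-macro-braces entries above.
        let set = hashset!["a", "b"];
        let map = hashmap! { "a" => 1, "b" => 2 };
        (set, map)
    }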