Clippy cleanup

sunli 2020-03-21 09:32:13 +08:00
parent ee1e6caac4
commit 2b2be34d4d
40 changed files with 364 additions and 271 deletions
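Most of what follows is mechanical clippy cleanup, plus new input-value validator plumbing threaded through the derive macros, registry, and validation rules. As a condensed illustration only (not code from the commit; `example` and `wrap` are placeholder names), the recurring before/after shapes are:

fn example(input: Result<&str, std::num::ParseIntError>, internal: bool, id: &str) -> String {
    // `.map_err(|err| wrap(err))` becomes `.map_err(wrap)` (redundant closure)
    let _ = input.map_err(wrap);
    // `match internal { true => .., false => .. }` becomes a plain `if`/`else`
    let prefix = if internal { "crate" } else { "async_graphql" };
    // `splitn(2, ":")` becomes `splitn(2, ':')` (single-character pattern)
    let _parts: Vec<&str> = id.splitn(2, ':').collect();
    // a trailing `return expr;` becomes just `expr`
    prefix.to_string()
}

fn wrap(err: std::num::ParseIntError) -> String {
    err.to_string()
}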

View File

@ -11,10 +11,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Check with clippy
run: cargo clippy
- name: Check format
run: cargo fmt --all -- --check
- name: Check with clippy
run: cargo clippy --all
- name: Build
run: cargo build --all --verbose
- name: Run tests

View File

@ -114,7 +114,7 @@ where
Box::pin(async move {
if req.method() == Method::GET {
if enable_subscription {
if let Some(s) = req.headers().get(&header::UPGRADE) {
if let Some(s) = req.headers().get(header::UPGRADE) {
if let Ok(s) = s.to_str() {
if s.to_ascii_lowercase().contains("websocket") {
return ws::start_with_protocols(
@ -168,14 +168,14 @@ where
let mut gql_request = {
let data = read_multipart(&mut multipart, "operations").await?;
serde_json::from_slice::<GQLRequest>(&data)
.map_err(|err| actix_web::error::ErrorBadRequest(err))?
.map_err(actix_web::error::ErrorBadRequest)?
};
// read map
let mut map = {
let data = read_multipart(&mut multipart, "map").await?;
serde_json::from_slice::<HashMap<String, Vec<String>>>(&data)
.map_err(|err| actix_web::error::ErrorBadRequest(err))?
.map_err(actix_web::error::ErrorBadRequest)?
};
let mut query = match gql_request.prepare(schema) {
@ -202,8 +202,7 @@ where
let content_type = field.content_type().to_string();
let mut data = BytesMut::new();
while let Some(part) = field.next().await {
let part =
part.map_err(|err| actix_web::error::ErrorBadRequest(err))?;
let part = part.map_err(actix_web::error::ErrorBadRequest)?;
data.extend(&part);
if data.len() > max_file_size {
@ -261,7 +260,7 @@ where
}
fn get_content_type(headers: &HeaderMap) -> actix_web::Result<Mime> {
if let Some(content_type) = headers.get(&header::CONTENT_TYPE) {
if let Some(content_type) = headers.get(header::CONTENT_TYPE) {
if let Ok(content_type) = content_type.to_str() {
if let Ok(ct) = content_type.parse::<Mime>() {
return Ok(ct);
@ -287,7 +286,7 @@ async fn read_multipart(multipart: &mut Multipart, name: &str) -> actix_web::Res
let mut data = BytesMut::new();
while let Some(part) = field.next().await {
let part = part.map_err(|err| actix_web::error::ErrorBadRequest(err))?;
let part = part.map_err(actix_web::error::ErrorBadRequest)?;
data.extend(&part);
}
data

View File

@ -1,5 +1,7 @@
use crate::utils::parse_value;
use crate::utils::{parse_validators, parse_value};
use graphql_parser::query::Value;
use proc_macro2::TokenStream;
use quote::quote;
use syn::{Attribute, AttributeArgs, Error, Meta, MetaList, NestedMeta, Result, Type};
#[derive(Debug)]
@ -58,6 +60,7 @@ pub struct Argument {
pub name: Option<String>,
pub desc: Option<String>,
pub default: Option<Value>,
pub validators: TokenStream,
}
impl Argument {
@ -65,59 +68,59 @@ impl Argument {
let mut name = None;
let mut desc = None;
let mut default = None;
let mut validators = quote! { Default::default() };
for attr in attrs {
match attr.parse_meta() {
Ok(Meta::List(ls)) if ls.path.is_ident("arg") => {
for meta in &ls.nested {
match meta {
NestedMeta::Meta(Meta::NameValue(nv)) => {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("default") {
if let syn::Lit::Str(lit) = &nv.lit {
match parse_value(&lit.value()) {
Ok(Value::Variable(_)) => {
return Err(Error::new_spanned(
&nv.lit,
"The default cannot be a variable",
))
}
Ok(value) => default = Some(value),
Err(err) => {
return Err(Error::new_spanned(
&nv.lit,
format!("Invalid value: {}", err),
));
}
if let NestedMeta::Meta(Meta::NameValue(nv)) = meta {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("default") {
if let syn::Lit::Str(lit) = &nv.lit {
match parse_value(&lit.value()) {
Ok(Value::Variable(_)) => {
return Err(Error::new_spanned(
&nv.lit,
"The default cannot be a variable",
))
}
Ok(value) => default = Some(value),
Err(err) => {
return Err(Error::new_spanned(
&nv.lit,
format!("Invalid value: {}", err),
));
}
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'default' should be a string.",
));
}
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'default' should be a string.",
));
}
}
_ => {}
}
}
validators = parse_validators(&ls)?;
}
_ => {}
}
@ -127,6 +130,7 @@ impl Argument {
name,
desc,
default,
validators,
})
}
}
@ -153,38 +157,35 @@ impl Field {
Ok(Meta::List(ls)) if ls.path.is_ident("field") => {
is_field = true;
for meta in &ls.nested {
match meta {
NestedMeta::Meta(Meta::NameValue(nv)) => {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("deprecation") {
if let syn::Lit::Str(lit) = &nv.lit {
deprecation = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'deprecation' should be a string.",
));
}
if let NestedMeta::Meta(Meta::NameValue(nv)) = meta {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("deprecation") {
if let syn::Lit::Str(lit) = &nv.lit {
deprecation = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'deprecation' should be a string.",
));
}
}
_ => {}
}
}
}
@ -272,38 +273,35 @@ impl EnumItem {
if attr.path.is_ident("item") {
if let Ok(Meta::List(args)) = attr.parse_meta() {
for meta in args.nested {
match meta {
NestedMeta::Meta(Meta::NameValue(nv)) => {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("deprecation") {
if let syn::Lit::Str(lit) = nv.lit {
deprecation = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'deprecation' should be a string.",
));
}
if let NestedMeta::Meta(Meta::NameValue(nv)) = meta {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("deprecation") {
if let syn::Lit::Str(lit) = nv.lit {
deprecation = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'deprecation' should be a string.",
));
}
}
_ => {}
}
}
}
@ -324,6 +322,7 @@ pub struct InputField {
pub name: Option<String>,
pub desc: Option<String>,
pub default: Option<Value>,
pub validators: TokenStream,
}
impl InputField {
@ -332,18 +331,19 @@ impl InputField {
let mut name = None;
let mut desc = None;
let mut default = None;
let mut validators = quote! { Default::default() };
for attr in attrs {
if attr.path.is_ident("field") {
if let Ok(Meta::List(args)) = attr.parse_meta() {
for meta in args.nested {
if let Ok(Meta::List(args)) = &attr.parse_meta() {
for meta in &args.nested {
match meta {
NestedMeta::Meta(Meta::Path(p)) if p.is_ident("internal") => {
internal = true;
}
NestedMeta::Meta(Meta::NameValue(nv)) => {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = nv.lit {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
@ -352,7 +352,7 @@ impl InputField {
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = nv.lit {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
@ -361,7 +361,7 @@ impl InputField {
));
}
} else if nv.path.is_ident("default") {
if let syn::Lit::Str(lit) = nv.lit {
if let syn::Lit::Str(lit) = &nv.lit {
match parse_value(&lit.value()) {
Ok(Value::Variable(_)) => {
return Err(Error::new_spanned(
@ -388,6 +388,8 @@ impl InputField {
_ => {}
}
}
validators = parse_validators(&args)?;
}
}
}
@ -397,6 +399,7 @@ impl InputField {
name,
desc,
default,
validators,
})
}
}
@ -468,65 +471,62 @@ impl InterfaceFieldArgument {
let mut default = None;
for meta in &ls.nested {
match meta {
NestedMeta::Meta(Meta::NameValue(nv)) => {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
if let NestedMeta::Meta(Meta::NameValue(nv)) = meta {
if nv.path.is_ident("name") {
if let syn::Lit::Str(lit) = &nv.lit {
name = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("type") {
if let syn::Lit::Str(lit) = &nv.lit {
if let Ok(ty2) = syn::parse_str::<syn::Type>(&lit.value()) {
ty = Some(ty2);
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'name' should be a string.",
));
return Err(Error::new_spanned(&lit, "Expect type"));
}
} else if nv.path.is_ident("desc") {
if let syn::Lit::Str(lit) = &nv.lit {
desc = Some(lit.value());
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'desc' should be a string.",
));
}
} else if nv.path.is_ident("type") {
if let syn::Lit::Str(lit) = &nv.lit {
if let Ok(ty2) = syn::parse_str::<syn::Type>(&lit.value()) {
ty = Some(ty2);
} else {
return Err(Error::new_spanned(&lit, "Expect type"));
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'type' should be a string.",
));
}
} else if nv.path.is_ident("default") {
if let syn::Lit::Str(lit) = &nv.lit {
match parse_value(&lit.value()) {
Ok(Value::Variable(_)) => {
return Err(Error::new_spanned(
&nv.lit,
"The default cannot be a variable",
))
}
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'type' should be a string.",
));
}
} else if nv.path.is_ident("default") {
if let syn::Lit::Str(lit) = &nv.lit {
match parse_value(&lit.value()) {
Ok(Value::Variable(_)) => {
return Err(Error::new_spanned(
&nv.lit,
"The default cannot be a variable",
))
}
Ok(value) => default = Some(value),
Err(err) => {
return Err(Error::new_spanned(
&nv.lit,
format!("Invalid value: {}", err),
));
}
Ok(value) => default = Some(value),
Err(err) => {
return Err(Error::new_spanned(
&nv.lit,
format!("Invalid value: {}", err),
));
}
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'default' should be a string.",
));
}
} else {
return Err(Error::new_spanned(
&nv.lit,
"Attribute 'default' should be a string.",
));
}
}
_ => {}
}
}

View File

@ -94,6 +94,7 @@ pub fn generate(object_args: &args::InputObject, input: &DeriveInput) -> Result<
description: #desc,
ty: <#ty as #crate_name::Type>::create_type_info(registry),
default_value: #default,
validators: Default::default(),
}
})
}

View File

@ -137,6 +137,7 @@ pub fn generate(interface_args: &args::Interface, input: &DeriveInput) -> Result
description: #desc,
ty: <#ty as #crate_name::Type>::create_type_info(registry),
default_value: #schema_default,
validators: Default::default(),
});
});
}

View File

@ -1,3 +1,5 @@
#![allow(clippy::cognitive_complexity)]
extern crate proc_macro;
mod args;

View File

@ -87,8 +87,7 @@ pub fn generate(object_args: &args::Object, item_impl: &mut ItemImpl) -> Result<
(_, Type::Reference(TypeReference { elem, .. })) => {
if let Type::Path(path) = elem.as_ref() {
if idx != 1
|| path.path.segments.last().unwrap().ident.to_string()
!= "Context"
|| path.path.segments.last().unwrap().ident != "Context"
{
return Err(Error::new_spanned(
arg,
@ -114,6 +113,7 @@ pub fn generate(object_args: &args::Object, item_impl: &mut ItemImpl) -> Result<
name,
desc,
default,
validators,
},
) in args
{
@ -138,6 +138,7 @@ pub fn generate(object_args: &args::Object, item_impl: &mut ItemImpl) -> Result<
description: #desc,
ty: <#ty as #crate_name::Type>::create_type_info(registry),
default_value: #schema_default,
validators: #validators,
});
});
@ -171,9 +172,10 @@ pub fn generate(object_args: &args::Object, item_impl: &mut ItemImpl) -> Result<
});
});
let ctx_field = match arg_ctx {
true => quote! { &ctx, },
false => quote! {},
let ctx_field = if arg_ctx {
quote! { &ctx, }
} else {
quote! {}
};
let field_ident = &method.sig.ident;

View File

@ -12,7 +12,7 @@ impl<'a> OutputType<'a> {
let ty = if let Type::Path(p) = input {
if p.path.segments.last().unwrap().ident == "Result" {
if let PathArguments::AngleBracketed(args) = &p.path.segments[0].arguments {
if args.args.len() == 0 {
if args.args.is_empty() {
return Err(Error::new_spanned(input, "Invalid type"));
}
let mut res = None;

View File

@ -137,6 +137,7 @@ pub fn generate(object_args: &args::Object, item_impl: &mut ItemImpl) -> Result<
name,
desc,
default,
validators,
},
) in args
{
@ -161,6 +162,7 @@ pub fn generate(object_args: &args::Object, item_impl: &mut ItemImpl) -> Result<
description: #desc,
ty: <#ty as #crate_name::Type>::create_type_info(registry),
default_value: #schema_default,
validators: #validators,
});
});

View File

@ -2,15 +2,14 @@ use graphql_parser::parse_query;
use graphql_parser::query::{Definition, OperationDefinition, ParseError, Query, Value};
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::{Error, Ident, Result};
use syn::{Error, Ident, Meta, MetaList, NestedMeta, Result};
pub fn get_crate_name(internal: bool) -> TokenStream {
match internal {
true => quote! { crate },
false => {
let id = Ident::new("async_graphql", Span::call_site());
quote! { #id }
}
if internal {
quote! { crate }
} else {
let id = Ident::new("async_graphql", Span::call_site());
quote! { #id }
}
}
@ -97,3 +96,30 @@ pub fn check_reserved_name(name: &str, internal: bool) -> Result<()> {
Ok(())
}
}
pub fn parse_validators(args: &MetaList) -> Result<TokenStream> {
let mut validators = Vec::new();
for arg in &args.nested {
if let NestedMeta::Meta(Meta::List(ls)) = arg {
if ls.path.is_ident("validator") {
let mut ty = None;
let mut params = Vec::new();
for item in &ls.nested {
match item {
NestedMeta::Meta(Meta::Path(p)) => {
ty = Some(p);
}
NestedMeta::Meta(Meta::NameValue(nv)) => {
let name = &nv.path;
let value = &nv.lit;
params.push(quote! { #name: #value });
}
_ => {}
}
}
validators.push(quote! { Box::new(#ty { #(#params),* }) });
}
}
}
Ok(quote! { std::sync::Arc::new(vec![#(#validators)*]) })
}
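For orientation: `parse_validators` turns each `validator(Type, name = value, ...)` entry found inside an `arg(...)` or `field(...)` attribute into `Box::new(Type { name: value, ... })` and wraps all of them in an `Arc`. A minimal sketch of what the generated tokens amount to, assuming a hypothetical `validator(IntRange, min = 1, max = 100)` entry (the attribute spelling is inferred from this parser, not stated by the commit):

use async_graphql::validators::{InputValueValidator, IntRange};
use std::sync::Arc;

fn main() {
    // Roughly the value the emitted `std::sync::Arc::new(vec![...])` expression produces.
    let validators: Arc<Vec<Box<dyn InputValueValidator>>> =
        Arc::new(vec![Box::new(IntRange { min: 1, max: 100 })]);
    assert_eq!(validators.len(), 1);
}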

View File

@ -25,7 +25,7 @@ pub trait Type {
/// Parse `GlobalID`.
fn from_global_id(id: ID) -> Result<ID> {
let v: Vec<&str> = id.splitn(2, ":").collect();
let v: Vec<&str> = id.splitn(2, ':').collect();
if v.len() != 2 {
return Err(QueryError::InvalidGlobalID.into());
}
@ -155,6 +155,7 @@ macro_rules! impl_scalar_internal {
}
}
#[allow(clippy::ptr_arg)]
#[async_trait::async_trait]
impl crate::OutputValueType for $ty {
async fn resolve(
@ -191,6 +192,7 @@ macro_rules! impl_scalar {
}
}
#[allow(clippy::ptr_arg)]
#[async_graphql::async_trait::async_trait]
impl async_graphql::OutputValueType for $ty {
async fn resolve(
@ -215,6 +217,7 @@ impl<T: Type + Send + Sync> Type for &T {
#[async_trait::async_trait]
impl<T: OutputValueType + Send + Sync> OutputValueType for &T {
#[allow(clippy::trivially_copy_pass_by_ref)]
async fn resolve(value: &Self, ctx: &ContextSelectionSet<'_>) -> Result<serde_json::Value> {
T::resolve(*value, ctx).await
}

View File

@ -60,7 +60,7 @@ impl Variables {
content_type: Option<&str>,
content: Bytes,
) {
let mut it = var_path.split(".").peekable();
let mut it = var_path.split('.').peekable();
if let Some(first) = it.next() {
if first != "variables" {
@ -85,18 +85,16 @@ impl Variables {
return;
}
}
} else {
if let Value::Object(obj) = current {
if let Some(value) = obj.get_mut(s) {
if !has_next {
*value = Value::String(file_string(filename, content_type, &content));
return;
} else {
current = value;
}
} else {
} else if let Value::Object(obj) = current {
if let Some(value) = obj.get_mut(s) {
if !has_next {
*value = Value::String(file_string(filename, content_type, &content));
return;
} else {
current = value;
}
} else {
return;
}
}
}
@ -119,11 +117,9 @@ fn json_value_to_gql_value(value: serde_json::Value) -> Value {
serde_json::Value::Number(n) if n.is_f64() => Value::Float(n.as_f64().unwrap()),
serde_json::Value::Number(n) => Value::Int((n.as_i64().unwrap() as i32).into()),
serde_json::Value::String(s) => Value::String(s),
serde_json::Value::Array(ls) => Value::List(
ls.into_iter()
.map(|value| json_value_to_gql_value(value))
.collect(),
),
serde_json::Value::Array(ls) => {
Value::List(ls.into_iter().map(json_value_to_gql_value).collect())
}
serde_json::Value::Object(obj) => Value::Object(
obj.into_iter()
.map(|(name, value)| (name, json_value_to_gql_value(value)))
@ -172,7 +168,7 @@ impl<'a, T> ContextBase<'a, T> {
item,
variables: self.variables,
variable_definitions: self.variable_definitions,
registry: self.registry.clone(),
registry: self.registry,
data: self.data,
fragments: self.fragments,
}
@ -198,10 +194,10 @@ impl<'a, T> ContextBase<'a, T> {
return Ok(default.clone());
}
}
return Err(QueryError::VarNotDefined {
Err(QueryError::VarNotDefined {
var_name: name.to_string(),
}
.into());
.into())
}
fn resolve_input_value(&self, mut value: Value) -> Result<Value> {
@ -216,7 +212,7 @@ impl<'a, T> ContextBase<'a, T> {
Ok(value)
}
Value::Object(ref mut obj) => {
for (_, value) in obj {
for value in obj.values_mut() {
if let Value::Variable(var_name) = value {
*value = self.var_value(&var_name)?;
}

View File

@ -150,7 +150,6 @@ pub enum QueryError {
},
}
/// Creates a wrapper with an error location
#[allow(missing_docs)]
pub trait ErrorWithPosition {

View File

@ -58,6 +58,8 @@
//! * [GraphQL over WebSocket Protocol](https://github.com/apollographql/subscriptions-transport-ws/blob/master/PROTOCOL.md)
#![warn(missing_docs)]
#![allow(clippy::needless_doctest_main)]
#![allow(clippy::needless_lifetimes)]
#[macro_use]
extern crate thiserror;
@ -76,6 +78,9 @@ mod subscription;
mod types;
mod validation;
/// Input value validators
pub mod validators;
#[doc(hidden)]
pub use anyhow;
#[doc(hidden)]

View File

@ -167,14 +167,14 @@ impl<'a, Query, Mutation> PreparedQuery<'a, Query, Mutation> {
item: &self.selection_set,
variables: &self.variables,
variable_definitions: self.variable_definitions.as_deref(),
registry: self.registry.clone(),
registry: self.registry,
data: self.data,
fragments: &self.fragments,
};
match self.root {
Root::Query(query) => return OutputValueType::resolve(query, &ctx).await,
Root::Mutation(mutation) => return OutputValueType::resolve(mutation, &ctx).await,
Root::Query(query) => OutputValueType::resolve(query, &ctx).await,
Root::Mutation(mutation) => OutputValueType::resolve(mutation, &ctx).await,
}
}
}

View File

@ -1,9 +1,11 @@
use crate::validators::InputValueValidator;
use crate::{model, Value};
use graphql_parser::query::Type as ParsedType;
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
fn parse_non_null(type_name: &str) -> Option<&str> {
if type_name.ends_with("!") {
if type_name.ends_with('!') {
Some(&type_name[..type_name.len() - 1])
} else {
None
@ -11,7 +13,7 @@ fn parse_non_null(type_name: &str) -> Option<&str> {
}
fn parse_list(type_name: &str) -> Option<&str> {
if type_name.starts_with("[") {
if type_name.starts_with('[') {
Some(&type_name[1..type_name.len() - 1])
} else {
None
@ -58,6 +60,7 @@ pub struct InputValue {
pub description: Option<&'static str>,
pub ty: String,
pub default_value: Option<&'static str>,
pub validators: Arc<Vec<Box<dyn InputValueValidator>>>,
}
#[derive(Clone)]

View File

@ -108,7 +108,7 @@ pub async fn do_resolve_values<'a, T: ObjectType + Send + Sync>(
Resolver {
ctx,
obj: root,
result: result,
result,
}
.resolve()
.await?;

View File

@ -1,4 +1,4 @@
use crate::{impl_scalar_internal, Scalar, Result, Value};
use crate::{impl_scalar_internal, Result, Scalar, Value};
use bson::oid::ObjectId;
impl Scalar for ObjectId {

View File

@ -5,12 +5,12 @@ mod integers;
mod string;
mod url;
#[cfg(feature = "bson")]
mod bson;
#[cfg(feature = "chrono")]
mod datetime;
#[cfg(feature = "uuid")]
mod uuid;
#[cfg(feature = "bson")]
mod bson;
pub use id::ID;
@ -18,9 +18,9 @@ pub use id::ID;
mod tests {
use super::ID;
use crate::Type;
use bson::oid::ObjectId;
use chrono::{DateTime, Utc};
use uuid::Uuid;
use bson::oid::ObjectId;
#[test]
fn test_scalar_type() {

View File

@ -4,7 +4,7 @@ use crate::{
};
use std::borrow::Cow;
const STRING_DESC:&'static str = "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.";
const STRING_DESC: &str = "The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.";
impl Scalar for String {
fn type_name() -> &'static str {
@ -56,6 +56,6 @@ impl<'a> Type for &'a str {
#[async_trait::async_trait]
impl<'a> OutputValueType for &'a str {
async fn resolve(value: &Self, _: &ContextSelectionSet<'_>) -> Result<serde_json::Value> {
Ok(value.to_string().into())
Ok((*value).into())
}
}

View File

@ -56,7 +56,8 @@ impl<Query: ObjectType, Mutation: ObjectType, Subscription: SubscriptionType>
name: "if",
description: Some("Included when true."),
ty: "Boolean!".to_string(),
default_value: None
default_value: None,
validators: Default::default(),
});
args
}
@ -76,7 +77,8 @@ impl<Query: ObjectType, Mutation: ObjectType, Subscription: SubscriptionType>
name: "if",
description: Some("Skipped when true."),
ty: "Boolean!".to_string(),
default_value: None
default_value: None,
validators: Default::default(),
});
args
}

View File

@ -51,7 +51,7 @@ pub trait SubscriptionType: Type {
/// This function returns true only for the `EmptySubscription` type
#[doc(hidden)]
fn is_empty() -> bool {
return false;
false
}
fn create_type(field: &Field, types: &mut HashMap<TypeId, Field>) -> Result<()>;

View File

@ -1,11 +1,11 @@
mod connection;
mod connection_type;
mod edge;
mod page_info;
mod slice;
use crate::{Context, ObjectType, QueryError, Result};
pub use connection::Connection;
pub use connection_type::Connection;
/// Connection query operation
pub enum QueryOperation<'a> {

View File

@ -26,7 +26,7 @@ impl<'a, T: Sync> DataSource for &'a [T] {
.and_then(|before| base64::decode(before).ok())
.and_then(|data| data.as_slice().read_u32::<BE>().ok())
.map(|idx| idx as usize)
.unwrap_or(self.len());
.unwrap_or_else(|| self.len());
let start = if end < *limit { 0 } else { end - *limit };
(start, end)
}

View File

@ -62,6 +62,6 @@ impl ObjectType for EmptyMutation {
#[async_trait::async_trait]
impl OutputValueType for EmptyMutation {
async fn resolve(_value: &Self, _ctx: &ContextSelectionSet<'_>) -> Result<serde_json::Value> {
return Err(QueryError::NotConfiguredMutations.into());
Err(QueryError::NotConfiguredMutations.into())
}
}

View File

@ -51,6 +51,6 @@ impl SubscriptionType for EmptySubscription {
#[async_trait::async_trait]
impl OutputValueType for EmptySubscription {
async fn resolve(_value: &Self, _ctx: &ContextSelectionSet<'_>) -> Result<serde_json::Value> {
return Err(QueryError::NotConfiguredSubscriptions.into());
Err(QueryError::NotConfiguredSubscriptions.into())
}
}

View File

@ -34,7 +34,7 @@ pub trait EnumType: Type + Sized + Eq + Send + Copy + Sized + 'static {
let items = Self::items();
for item in items {
if item.value == *self {
return Ok(item.name.clone().into());
return Ok(item.name.into());
}
}
unreachable!()

View File

@ -31,6 +31,7 @@ impl<T: InputValueType> InputValueType for Vec<T> {
}
}
#[allow(clippy::ptr_arg)]
#[async_trait::async_trait]
impl<T: OutputValueType + Send + Sync> OutputValueType for Vec<T> {
async fn resolve(value: &Self, ctx: &ContextSelectionSet<'_>) -> Result<serde_json::Value> {

View File

@ -47,6 +47,7 @@ impl<T: Type> Type for QueryRoot<T> {
description: None,
ty: "String!".to_string(),
default_value: None,
validators: Default::default(),
},
);
args
@ -87,7 +88,7 @@ impl<T: ObjectType + Send + Sync> ObjectType for QueryRoot<T> {
.map_err(|err| err.with_position(field.position).into());
}
return self.inner.resolve_field(ctx, field).await;
self.inner.resolve_field(ctx, field).await
}
async fn resolve_inline_fragment(

View File

@ -37,10 +37,10 @@ impl<'a> InputValueType for Upload {
if let Value::String(s) = value {
if s.starts_with("file:") {
let s = &s[5..];
if let Some(idx) = s.find("|") {
if let Some(idx) = s.find('|') {
let name_and_type = &s[..idx];
let content = &s[idx + 1..];
if let Some(type_idx) = name_and_type.find(":") {
if let Some(type_idx) = name_and_type.find(':') {
let name = &name_and_type[..type_idx];
let mime_type = &name_and_type[type_idx + 1..];
return Some(Self {

View File

@ -50,7 +50,7 @@ impl<'a> ValidatorContext<'a> {
}
pub fn parent_type(&self) -> Option<&'a registry::Type> {
self.type_stack.get(self.type_stack.len() - 2).map(|t| *t)
self.type_stack.get(self.type_stack.len() - 2).copied()
}
pub fn current_type(&self) -> &'a registry::Type {
@ -62,6 +62,6 @@ impl<'a> ValidatorContext<'a> {
}
pub fn fragment(&self, name: &str) -> Option<&'a FragmentDefinition> {
self.fragments.get(name).map(|f| *f)
self.fragments.get(name).copied()
}
}

View File

@ -36,6 +36,16 @@ impl<'a> Visitor<'a> for ArgumentsOfCorrectType<'a> {
.current_args
.and_then(|args| args.get(name).map(|input| input))
{
for validator in arg.validators.iter() {
if let Some(reason) = validator.is_valid(value) {
ctx.report_error(
vec![pos],
format!("Invalid value for argument \"{}\", {}", arg.name, reason,),
);
return;
}
}
if !is_valid_input_value(ctx.registry, &arg.ty, value) {
ctx.report_error(
vec![pos],

View File

@ -17,20 +17,18 @@ impl<'a> Visitor<'a> for DefaultValuesOfCorrectType {
"Argument \"{}\" has type \"{}\" and is not nullable, so it't can't have a default value",
variable_definition.name, variable_definition.var_type,
));
} else {
if !is_valid_input_value(
ctx.registry,
&variable_definition.var_type.to_string(),
value,
) {
ctx.report_error(
vec![variable_definition.position],
format!(
"Invalid default value for argument \"{}\", expected type \"{}\"",
variable_definition.name, variable_definition.var_type
),
)
}
} else if !is_valid_input_value(
ctx.registry,
&variable_definition.var_type.to_string(),
value,
) {
ctx.report_error(
vec![variable_definition.position],
format!(
"Invalid default value for argument \"{}\", expected type \"{}\"",
variable_definition.name, variable_definition.var_type
),
)
}
}
}

View File

@ -32,9 +32,8 @@ impl<'a, 'ctx> FindConflicts<'a, 'ctx> {
Selection::Field(field) => {
let output_name = field
.alias
.as_ref()
.map(|alias| alias.as_str())
.unwrap_or(field.name.as_str());
.as_deref()
.unwrap_or_else(|| field.name.as_str());
self.add_output(output_name, field);
}
Selection::InlineFragment(inline_fragment) => {

View File

@ -11,19 +11,19 @@ impl<'a> Visitor<'a> for ProvidedNonNullArguments {
fn enter_directive(&mut self, ctx: &mut ValidatorContext<'a>, directive: &'a Directive) {
if let Some(schema_directive) = ctx.registry.directives.get(&directive.name) {
for arg in schema_directive.args.values() {
if TypeName::create(&arg.ty).is_non_null() && arg.default_value.is_none() {
if directive
if TypeName::create(&arg.ty).is_non_null()
&& arg.default_value.is_none()
&& directive
.arguments
.iter()
.find(|(name, _)| name == arg.name)
.is_none()
{
ctx.report_error(vec![directive.position],
{
ctx.report_error(vec![directive.position],
format!(
"Directive \"@{}\" argument \"{}\" of type \"{}\" is required but not provided",
directive.name, arg.name, arg.ty
));
}
}
}
}
@ -33,19 +33,19 @@ impl<'a> Visitor<'a> for ProvidedNonNullArguments {
if let Some(parent_type) = ctx.parent_type() {
if let Some(schema_field) = parent_type.field_by_name(&field.name) {
for arg in schema_field.args.values() {
if TypeName::create(&arg.ty).is_non_null() && arg.default_value.is_none() {
if field
if TypeName::create(&arg.ty).is_non_null()
&& arg.default_value.is_none()
&& field
.arguments
.iter()
.find(|(name, _)| name == arg.name)
.is_none()
{
ctx.report_error(vec![field.position],
{
ctx.report_error(vec![field.position],
format!(
r#"Field "{}" argument "{}" of type "{}" is required but not provided"#,
field.name, arg.name, parent_type.name()
));
}
}
}
}

View File

@ -11,20 +11,17 @@ impl<'a> Visitor<'a> for UploadFile {
ctx: &mut ValidatorContext<'a>,
operation_definition: &'a OperationDefinition,
) {
match operation_definition {
OperationDefinition::Query(query) => {
for var in &query.variable_definitions {
if let Some(ty) = ctx.registry.basic_type_by_parsed_type(&var.var_type) {
if ty.name() == "Upload" {
ctx.report_error(
vec![var.position],
"The Upload type is only allowed to be defined on a mutation",
);
}
if let OperationDefinition::Query(query) = operation_definition {
for var in &query.variable_definitions {
if let Some(ty) = ctx.registry.basic_type_by_parsed_type(&var.var_type) {
if ty.name() == "Upload" {
ctx.report_error(
vec![var.position],
"The Upload type is only allowed to be defined on a mutation",
);
}
}
}
_ => {}
}
}
}

View File

@ -461,7 +461,7 @@ fn visit_arguments<'a, V: Visitor<'a>>(
v: &mut V,
ctx: &mut ValidatorContext<'a>,
pos: Pos,
arguments: &'a Vec<(Name, Value)>,
arguments: &'a [(Name, Value)],
) {
for (name, value) in arguments {
v.enter_argument(ctx, pos, name, value);
@ -472,7 +472,7 @@ fn visit_arguments<'a, V: Visitor<'a>>(
fn visit_variable_definitions<'a, V: Visitor<'a>>(
v: &mut V,
ctx: &mut ValidatorContext<'a>,
variable_definitions: &'a Vec<VariableDefinition>,
variable_definitions: &'a [VariableDefinition],
) {
for d in variable_definitions {
v.enter_variable_definition(ctx, d);
@ -483,7 +483,7 @@ fn visit_variable_definitions<'a, V: Visitor<'a>>(
fn visit_directives<'a, V: Visitor<'a>>(
v: &mut V,
ctx: &mut ValidatorContext<'a>,
directives: &'a Vec<Directive>,
directives: &'a [Directive],
) {
for d in directives {
v.enter_directive(ctx, d);

View File

@ -0,0 +1,30 @@
use crate::validators::InputValueValidator;
use graphql_parser::query::Value;
/// Integer range validator
pub struct IntRange {
/// Minimum value, including this value
pub min: i64,
/// Maximum value, including this value
pub max: i64,
}
impl InputValueValidator for IntRange {
fn is_valid(&self, value: &Value) -> Option<String> {
if let Value::Int(n) = value {
if n.as_i64().unwrap() < self.min || n.as_i64().unwrap() > self.max {
Some(format!(
"the value is {}, but the range must be between {} and {}",
n.as_i64().unwrap(),
self.min,
self.max
))
} else {
None
}
} else {
Some("expected type \"Int\"".to_string())
}
}
}

src/validators/mod.rs (new file, 16 lines)
View File

@ -0,0 +1,16 @@
mod int_validators;
use graphql_parser::schema::Value;
/// Input value validator
///
/// You can create your own input value validator by implementing this trait.
pub trait InputValueValidator
where
Self: Sync + Send,
{
/// Check value is valid, returns the reason for the error if it fails, otherwise None.
fn is_valid(&self, value: &Value) -> Option<String>;
}
pub use int_validators::IntRange;
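Since the trait only asks for `Sync + Send` and a single `is_valid` method, user-defined validators are just additional impls. A hypothetical sketch (the `NonEmptyString` type is illustrative and not part of this commit):

use async_graphql::validators::InputValueValidator;
use graphql_parser::query::Value;

/// Rejects empty strings; non-string values are also reported as errors.
struct NonEmptyString;

impl InputValueValidator for NonEmptyString {
    fn is_valid(&self, value: &Value) -> Option<String> {
        match value {
            Value::String(s) if s.is_empty() => Some("the string must not be empty".to_string()),
            Value::String(_) => None,
            _ => Some("expected type \"String\"".to_string()),
        }
    }
}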