Add support for multipart request

This commit is contained in:
sunli 2020-03-14 11:46:20 +08:00
parent bcf1e24268
commit abfe861749
20 changed files with 639 additions and 106 deletions

View File

@ -1,6 +1,6 @@
[package]
name = "async-graphql"
version = "1.0.0"
version = "1.1.0"
authors = ["sunli <scott_s829@163.com>"]
edition = "2018"
description = "A GraphQL server library implemented in Rust"
@ -36,8 +36,10 @@ actix-rt = "1.0.0"
slab = "0.4.2"
tide = "0.6.0"
mime = "0.3.16"
async-graphql-actix-web = { path = "async-graphql-actix-web", version = "0.1.0" }
[workspace]
members = [
"async-graphql-derive"
"async-graphql-derive",
"async-graphql-actix-web"
]

View File

@ -91,6 +91,8 @@ Open `http://localhost:8000` in browser
- [X] FRAGMENT_SPREAD
- [X] INLINE_FRAGMENT
- [X] Schema
- [X] Multipart Request (https://github.com/jaydenseric/graphql-multipart-request-spec)
- [X] Actix-web
- [X] Validation rules
- [X] ArgumentsOfCorrectType
- [X] DefaultValuesOfCorrectType

View File

@ -0,0 +1,21 @@
[package]
name = "async-graphql-actix-web"
version = "0.1.0"
authors = ["sunli <scott_s829@163.com>"]
edition = "2018"
description = "A GraphQL server library implemented in Rust"
publish = true
license = "MIT/Apache-2.0"
documentation = "https://docs.rs/async-graphql/"
homepage = "https://github.com/sunli829/async-graphql"
repository = "https://github.com/sunli829/async-graphql"
keywords = ["futures", "async", "graphql"]
categories = ["network-programming", "asynchronous"]
[dependencies]
async-graphql = { path = "..", version = "1.0.0" }
actix-web = "2.0.0"
actix-multipart = "0.2.0"
futures = "0.3.0"
serde_json = "1.0.48"
mime = "0.3.16"

View File

@ -0,0 +1,204 @@
use actix_multipart::Multipart;
use actix_web::http::{header, HeaderMap};
use actix_web::web::Payload;
use actix_web::{web, FromRequest, HttpRequest, HttpResponse, Responder};
use async_graphql::http::{GQLRequest, GQLResponse};
use async_graphql::{GQLObject, Schema};
use futures::StreamExt;
use mime::Mime;
use std::collections::HashMap;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
/// Builder for an actix-web request handler that serves GraphQL requests,
/// including `multipart/form-data` (file upload) requests.
pub struct HandlerBuilder<Query, Mutation> {
    // The schema that incoming requests are executed against.
    schema: Schema<Query, Mutation>,
    // Per-file upload size limit in bytes; `None` means unlimited.
    max_file_size: Option<usize>,
}
impl<Query, Mutation> HandlerBuilder<Query, Mutation>
where
Query: GQLObject + Send + Sync + 'static,
Mutation: GQLObject + Send + Sync + 'static,
{
pub fn new(schema: Schema<Query, Mutation>) -> Self {
Self {
schema,
max_file_size: Some(1024 * 1024 * 2),
}
}
pub fn max_file_size(self, size: usize) -> Self {
Self {
max_file_size: Some(size),
..self
}
}
pub fn build(
self,
) -> impl Fn(
HttpRequest,
Payload,
) -> Pin<Box<dyn Future<Output = actix_web::Result<HttpResponse>>>>
+ 'static
+ Clone {
let schema = Arc::new(self.schema);
let max_file_size = self.max_file_size;
move |req: HttpRequest, mut payload: Payload| {
let schema = schema.clone();
Box::pin(async move {
if req.method() != "POST" {
return Ok(HttpResponse::MethodNotAllowed().finish());
}
if let Ok(ct) = get_content_type(req.headers()) {
if ct.essence_str() == mime::MULTIPART_FORM_DATA {
let mut multipart = Multipart::from_request(&req, &mut payload.0).await?;
// read operators
let mut gql_request = {
let data = read_multipart(&mut multipart, "operations").await?;
serde_json::from_slice::<GQLRequest>(&data)
.map_err(|err| actix_web::error::ErrorBadRequest(err))?
};
// read map
let mut map = {
let data = read_multipart(&mut multipart, "map").await?;
serde_json::from_slice::<HashMap<String, Vec<String>>>(&data)
.map_err(|err| actix_web::error::ErrorBadRequest(err))?
};
let mut query = match gql_request.prepare(&schema) {
Ok(query) => query,
Err(err) => {
return Ok(web::Json(GQLResponse(Err(err)))
.respond_to(&req)
.await?)
}
};
if !query.is_upload() {
return Err(actix_web::error::ErrorBadRequest(
"It's not an upload operation",
));
}
// read files
while let Some(field) = multipart.next().await {
let mut field = field?;
if let Some(content_disposition) = field.content_disposition() {
if let (Some(name), Some(filename)) = (
content_disposition.get_name(),
content_disposition.get_filename(),
) {
if let Some(var_paths) = map.remove(name) {
let content_type = field.content_type().to_string();
let mut data = Vec::<u8>::new();
while let Some(part) = field.next().await {
let part = part.map_err(|err| {
actix_web::error::ErrorBadRequest(err)
})?;
data.extend(&part);
if let Some(max_file_size) = max_file_size {
if data.len() > max_file_size {
return Err(
actix_web::error::ErrorPayloadTooLarge(
"payload to large",
),
);
}
}
}
for var_path in var_paths {
query.set_upload(
&var_path,
filename,
Some(&content_type),
data.clone(),
);
}
} else {
return Err(actix_web::error::ErrorBadRequest(
"bad request",
));
}
} else {
return Err(actix_web::error::ErrorBadRequest("bad request"));
}
} else {
return Err(actix_web::error::ErrorBadRequest("bad request"));
}
}
if !map.is_empty() {
return Err(actix_web::error::ErrorBadRequest("missing files"));
}
Ok(web::Json(GQLResponse(query.execute().await))
.respond_to(&req)
.await?)
} else if ct.essence_str() == mime::APPLICATION_JSON {
let gql_req =
web::Json::<GQLRequest>::from_request(&req, &mut payload.0).await?;
Ok(web::Json(gql_req.into_inner().execute(&schema).await)
.respond_to(&req)
.await?)
} else {
Ok(HttpResponse::UnsupportedMediaType().finish())
}
} else {
Ok(HttpResponse::UnsupportedMediaType().finish())
}
})
}
}
}
fn get_content_type(headers: &HeaderMap) -> actix_web::Result<Mime> {
if let Some(content_type) = headers.get(&header::CONTENT_TYPE) {
if let Ok(content_type) = content_type.to_str() {
if let Ok(ct) = content_type.parse::<Mime>() {
return Ok(ct);
}
}
}
Err(actix_web::error::ErrorUnsupportedMediaType(
"unsupported media type",
))
}
async fn read_multipart(multipart: &mut Multipart, name: &str) -> actix_web::Result<Vec<u8>> {
let data = match multipart.next().await {
Some(Ok(mut field)) => {
if let Some(content_disposition) = field.content_disposition() {
if let Some(current_name) = content_disposition.get_name() {
if current_name != name {
return Err(actix_web::error::ErrorBadRequest(format!(
"expect \"{}\"",
name
)));
}
let mut data = Vec::<u8>::new();
while let Some(part) = field.next().await {
let part = part.map_err(|err| actix_web::error::ErrorBadRequest(err))?;
data.extend(&part);
}
data
} else {
return Err(actix_web::error::ErrorBadRequest("missing \"operations\""));
}
} else {
return Err(actix_web::error::ErrorBadRequest("bad request"));
}
}
Some(Err(err)) => return Err(err.into()),
None => return Err(actix_web::error::ErrorBadRequest("bad request")),
};
Ok(data)
}

View File

@ -1,15 +1,9 @@
mod starwars;
use actix_web::{guard, web, App, HttpResponse, HttpServer};
use async_graphql::http::{graphiql_source, playground_source, GQLRequest, GQLResponse};
use async_graphql::http::{graphiql_source, playground_source};
use async_graphql::{GQLEmptyMutation, Schema};
type StarWarsSchema = Schema<starwars::QueryRoot, GQLEmptyMutation>;
async fn index(s: web::Data<StarWarsSchema>, req: web::Json<GQLRequest>) -> web::Json<GQLResponse> {
web::Json(req.into_inner().execute(&s).await)
}
async fn gql_playgound() -> HttpResponse {
HttpResponse::Ok()
.content_type("text/html; charset=utf-8")
@ -25,11 +19,12 @@ async fn gql_graphiql() -> HttpResponse {
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
HttpServer::new(move || {
let schema =
Schema::new(starwars::QueryRoot, GQLEmptyMutation).data(starwars::StarWars::new());
let handler = async_graphql_actix_web::HandlerBuilder::new(schema).build();
App::new()
.data(
Schema::new(starwars::QueryRoot, GQLEmptyMutation).data(starwars::StarWars::new()),
)
.service(web::resource("/").guard(guard::Post()).to(index))
.service(web::resource("/").to(handler))
.service(web::resource("/").guard(guard::Get()).to(gql_playgound))
.service(
web::resource("/graphiql")

46
examples/upload-file.rs Normal file
View File

@ -0,0 +1,46 @@
use actix_web::{web, App, HttpServer};
use async_graphql::{Schema, Upload};
// The example schema exposes no queries; only the upload mutations below.
struct QueryRoot;

#[async_graphql::Object]
impl QueryRoot {}
struct MutationRoot;

#[async_graphql::Object]
impl MutationRoot {
    // Accepts a single uploaded file and prints its name and byte size.
    #[field]
    async fn single_upload(&self, file: Upload) -> bool {
        println!(
            "upload: filename={} size={}",
            file.filename,
            file.content.len()
        );
        true
    }

    // Accepts a list of uploaded files and prints each one's name and size.
    #[field]
    async fn multiple_upload(&self, files: Vec<Upload>) -> bool {
        for upload in files {
            println!(
                "upload: filename={} size={}",
                upload.filename,
                upload.content.len()
            );
        }
        true
    }
}
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    // Serve the upload example on http://127.0.0.1:8000/ — the built handler
    // accepts POST requests (JSON or multipart) at the root path.
    HttpServer::new(move || {
        let schema = Schema::new(QueryRoot, MutationRoot);
        let handler = async_graphql_actix_web::HandlerBuilder::new(schema).build();
        App::new().service(web::resource("/").to(handler))
    })
    .bind("127.0.0.1:8000")?
    .run()
    .await
}

View File

@ -10,32 +10,104 @@ use std::hash::BuildHasherDefault;
use std::ops::{Deref, DerefMut};
/// Variables of query
#[derive(Default)]
pub struct Variables(BTreeMap<String, Value>);
#[derive(Debug)]
pub struct Variables(Value);
impl Default for Variables {
fn default() -> Self {
Self(Value::Object(Default::default()))
}
}
impl Deref for Variables {
type Target = BTreeMap<String, Value>;
fn deref(&self) -> &Self::Target {
&self.0
if let Value::Object(obj) = &self.0 {
obj
} else {
unreachable!()
}
}
}
impl DerefMut for Variables {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
if let Value::Object(obj) = &mut self.0 {
obj
} else {
unreachable!()
}
}
}
impl Variables {
pub(crate) fn parse_from_json(value: serde_json::Value) -> Result<Self> {
let gql_value = json_value_to_gql_value(value);
if let Value::Object(obj) = gql_value {
Ok(Variables(obj))
if let Value::Object(_) = gql_value {
Ok(Variables(gql_value))
} else {
Ok(Default::default())
}
}
/// Replace the variable addressed by `var_path` (dotted, e.g.
/// `variables.file` or `variables.files.0`) with the encoded file string
/// produced by `file_string`.
///
/// Numeric path segments index into list values; other segments index into
/// object keys. Paths not rooted at `variables` are ignored.
pub(crate) fn set_upload(
    &mut self,
    var_path: &str,
    filename: &str,
    content_type: Option<&str>,
    content: Vec<u8>,
) {
    let mut it = var_path.split(".").peekable();
    // Per the multipart request spec the path must start with "variables".
    if let Some(first) = it.next() {
        if first != "variables" {
            return;
        }
    }
    // Walk the remaining segments, reborrowing `current` one level deeper
    // each iteration; the final segment is overwritten with the file string.
    let mut current = &mut self.0;
    while let Some(s) = it.next() {
        let has_next = it.peek().is_some();
        if let Ok(idx) = s.parse::<i32>() {
            // Numeric segment: index into a list value.
            // NOTE(review): if `current` is not a List here, the segment is
            // silently skipped and the walk continues from the same value —
            // confirm whether an early return was intended.
            if let Value::List(ls) = current {
                if let Some(value) = ls.get_mut(idx as usize) {
                    if !has_next {
                        *value = Value::String(file_string(filename, content_type, &content));
                        return;
                    } else {
                        current = value;
                    }
                } else {
                    return;
                }
            }
        } else {
            // Named segment: index into an object value.
            if let Value::Object(obj) = current {
                if let Some(value) = obj.get_mut(s) {
                    if !has_next {
                        *value = Value::String(file_string(filename, content_type, &content));
                        return;
                    } else {
                        current = value;
                    }
                } else {
                    return;
                }
            }
        }
    }
}
}
/// Encode an uploaded file as a string value of the form
/// `file:<name>[:<content_type>]|<content>`, later decoded by
/// `Upload::parse`.
///
/// NOTE(review): `from_utf8_unchecked` is undefined behavior if `content`
/// is not valid UTF-8, and uploads are arbitrary bytes — confirm callers
/// guarantee UTF-8 or switch to a lossless binary representation.
fn file_string(filename: &str, content_type: Option<&str>, content: &[u8]) -> String {
    if let Some(content_type) = content_type {
        format!("file:{}:{}|", filename, content_type)
            + unsafe { std::str::from_utf8_unchecked(content) }
    } else {
        format!("file:{}|", filename) + unsafe { std::str::from_utf8_unchecked(content) }
    }
}
fn json_value_to_gql_value(value: serde_json::Value) -> Value {
@ -74,11 +146,11 @@ pub type Context<'a> = ContextBase<'a, &'a Field>;
pub struct ContextBase<'a, T> {
pub(crate) item: T,
pub(crate) variables: Option<&'a Variables>,
pub(crate) variables: &'a Variables,
pub(crate) variable_definitions: Option<&'a [VariableDefinition]>,
pub(crate) registry: &'a Registry,
pub(crate) data: &'a Data,
pub(crate) fragments: &'a HashMap<String, &'a FragmentDefinition>,
pub(crate) fragments: &'a HashMap<String, FragmentDefinition>,
}
impl<'a, T> Deref for ContextBase<'a, T> {
@ -116,7 +188,7 @@ impl<'a, T> ContextBase<'a, T> {
.variable_definitions
.and_then(|defs| defs.iter().find(|def| def.name == name));
if let Some(def) = def {
if let Some(var_value) = self.variables.map(|vars| vars.get(&def.name)).flatten() {
if let Some(var_value) = self.variables.get(&def.name) {
return Ok(var_value.clone());
} else if let Some(default) = &def.default_value {
return Ok(default.clone());

View File

@ -24,6 +24,9 @@ pub enum QueryError {
#[error("Cannot query field \"{field_name}\" on type \"{object}\".")]
FieldNotFound { field_name: String, object: String },
#[error("Missing operation")]
MissingOperation,
#[error("Unknown operation named \"{name}\"")]
UnknownOperationNamed { name: String },

View File

@ -5,6 +5,7 @@ pub use graphiql_source::graphiql_source;
pub use playground_source::playground_source;
use crate::error::{RuleError, RuleErrors};
use crate::schema::PreparedQuery;
use crate::{GQLObject, PositionError, Result, Schema, Variables};
use graphql_parser::Pos;
use serde::ser::{SerializeMap, SerializeSeq};
@ -20,32 +21,46 @@ pub struct GQLRequest {
}
impl GQLRequest {
pub async fn execute<Query, Mutation>(self, schema: &Schema<Query, Mutation>) -> GQLResponse
pub async fn execute<Query, Mutation>(mut self, schema: &Schema<Query, Mutation>) -> GQLResponse
where
Query: GQLObject + Send + Sync,
Mutation: GQLObject + Send + Sync,
{
let vars = match self.variables {
match self.prepare(schema) {
Ok(query) => GQLResponse(query.execute().await),
Err(err) => GQLResponse(Err(err)),
}
}
pub fn prepare<'a, Query, Mutation>(
&'a mut self,
schema: &'a Schema<Query, Mutation>,
) -> Result<PreparedQuery<'a, Query, Mutation>>
where
Query: GQLObject + Send + Sync,
Mutation: GQLObject + Send + Sync,
{
let vars = match self.variables.take() {
Some(value) => match Variables::parse_from_json(value) {
Ok(vars) => Some(vars),
Err(err) => return GQLResponse(Err(err)),
Err(err) => return Err(err),
},
None => None,
};
let query = schema.query(&self.query);
let query = match &vars {
let query = match vars {
Some(vars) => query.variables(vars),
None => query,
};
let query = match &self.operation_name {
Some(operation_name) => query.operator_name(operation_name),
Some(name) => query.operator_name(name),
None => query,
};
GQLResponse(query.execute().await)
query.prepare()
}
}
pub struct GQLResponse(Result<serde_json::Value>);
pub struct GQLResponse(pub Result<serde_json::Value>);
impl Serialize for GQLResponse {
fn serialize<S: Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> {

View File

@ -80,7 +80,7 @@ pub use error::{ErrorWithPosition, PositionError, QueryError, QueryParseError};
pub use graphql_parser::query::Value;
pub use scalars::ID;
pub use schema::{QueryBuilder, Schema};
pub use types::GQLEmptyMutation;
pub use types::{GQLEmptyMutation, Upload};
pub type Result<T> = anyhow::Result<T>;
pub type Error = anyhow::Error;

View File

@ -1,4 +1,5 @@
use crate::{model, GQLType, Value};
use graphql_parser::query::Type as ParsedType;
use std::collections::{HashMap, HashSet};
fn parse_non_null(type_name: &str) -> Option<&str> {
@ -228,7 +229,15 @@ impl Registry {
});
}
pub fn get_basic_type(&self, type_name: &str) -> Option<&Type> {
pub fn basic_type_by_typename(&self, type_name: &str) -> Option<&Type> {
self.types.get(TypeName::get_basic_typename(type_name))
}
pub fn basic_type_by_parsed_type(&self, query_type: &ParsedType) -> Option<&Type> {
match query_type {
ParsedType::NonNullType(ty) => self.basic_type_by_parsed_type(ty),
ParsedType::ListType(ty) => self.basic_type_by_parsed_type(ty),
ParsedType::NamedType(name) => self.types.get(name.as_str()),
}
}
}

View File

@ -7,7 +7,9 @@ use crate::{
ContextBase, GQLObject, GQLOutputValue, GQLType, QueryError, QueryParseError, Result, Variables,
};
use graphql_parser::parse_query;
use graphql_parser::query::{Definition, OperationDefinition};
use graphql_parser::query::{
Definition, FragmentDefinition, OperationDefinition, SelectionSet, VariableDefinition,
};
use std::any::Any;
use std::collections::HashMap;
@ -116,6 +118,11 @@ impl<Query: GQLObject, Mutation: GQLObject> Schema<Query, Mutation> {
}
}
enum Root<'a, Query, Mutation> {
Query(&'a QueryRoot<Query>),
Mutation(&'a Mutation),
}
/// Query builder
pub struct QueryBuilder<'a, Query, Mutation> {
query: &'a QueryRoot<Query>,
@ -123,7 +130,7 @@ pub struct QueryBuilder<'a, Query, Mutation> {
registry: &'a Registry,
query_source: &'a str,
operation_name: Option<&'a str>,
variables: Option<&'a Variables>,
variables: Option<Variables>,
data: &'a Data,
}
@ -137,86 +144,148 @@ impl<'a, Query, Mutation> QueryBuilder<'a, Query, Mutation> {
}
/// Specify the variables.
pub fn variables(self, vars: &'a Variables) -> Self {
pub fn variables(self, vars: Variables) -> Self {
QueryBuilder {
variables: Some(vars),
..self
}
}
/// Parse and validate the query source and select the operation to execute.
///
/// Returns a `PreparedQuery` owning the chosen operation's selection set,
/// its variable definitions, and all fragment definitions; fails on parse
/// errors, validation-rule violations, subscriptions (unsupported), or when
/// no operation matches `operation_name`.
pub fn prepare(self) -> Result<PreparedQuery<'a, Query, Mutation>> {
    let document =
        parse_query(self.query_source).map_err(|err| QueryParseError(err.to_string()))?;
    check_rules(self.registry, &document)?;

    let mut fragments = HashMap::new();
    let mut selection_set = None;
    let mut variable_definitions = None;
    let mut root = None;

    for definition in document.definitions {
        match definition {
            Definition::Operation(operation_definition) => match operation_definition {
                // Anonymous shorthand query `{ ... }`.
                OperationDefinition::SelectionSet(s) => {
                    selection_set = Some(s);
                    root = Some(Root::Query(self.query));
                    break;
                }
                OperationDefinition::Query(query)
                    if query.name.is_none() || query.name.as_deref() == self.operation_name =>
                {
                    selection_set = Some(query.selection_set);
                    variable_definitions = Some(query.variable_definitions);
                    root = Some(Root::Query(self.query));
                    break;
                }
                OperationDefinition::Mutation(mutation)
                    if mutation.name.is_none()
                        || mutation.name.as_deref() == self.operation_name =>
                {
                    selection_set = Some(mutation.selection_set);
                    variable_definitions = Some(mutation.variable_definitions);
                    root = Some(Root::Mutation(self.mutation));
                    break;
                }
                OperationDefinition::Subscription(subscription)
                    if subscription.name.is_none()
                        || subscription.name.as_deref() == self.operation_name =>
                {
                    return Err(QueryError::NotSupported.into());
                }
                _ => {}
            },
            Definition::Fragment(fragment) => {
                fragments.insert(fragment.name.clone(), fragment);
            }
        }
    }

    Ok(PreparedQuery {
        registry: self.registry,
        variables: self.variables.unwrap_or_default(),
        data: self.data,
        fragments,
        // `ok_or_else` (was eager `ok_or`) so the error value is only
        // constructed when no operation matched.
        selection_set: selection_set.ok_or_else(|| {
            if let Some(name) = self.operation_name {
                QueryError::UnknownOperationNamed {
                    name: name.to_string(),
                }
            } else {
                QueryError::MissingOperation
            }
        })?,
        // `root` is always set on the same path that sets `selection_set`.
        root: root.unwrap(),
        variable_definitions,
    })
}
/// Execute the query.
pub async fn execute(self) -> Result<serde_json::Value>
where
Query: GQLObject + Send + Sync,
Mutation: GQLObject + Send + Sync,
{
let document =
parse_query(self.query_source).map_err(|err| QueryParseError(err.to_string()))?;
let mut fragments = HashMap::new();
check_rules(self.registry, &document)?;
for definition in &document.definitions {
if let Definition::Fragment(fragment) = definition {
fragments.insert(fragment.name.clone(), fragment);
}
}
for definition in &document.definitions {
match definition {
Definition::Operation(OperationDefinition::SelectionSet(selection_set)) => {
if self.operation_name.is_none() {
let ctx = ContextBase {
item: selection_set,
variables: self.variables.as_deref(),
variable_definitions: None,
registry: &self.registry,
data: self.data,
fragments: &fragments,
};
return GQLOutputValue::resolve(self.query, &ctx).await;
}
}
Definition::Operation(OperationDefinition::Query(query)) => {
if self.operation_name.is_none()
|| self.operation_name == query.name.as_ref().map(|s| s.as_str())
{
let ctx = ContextBase {
item: &query.selection_set,
variables: self.variables.as_deref(),
variable_definitions: Some(&query.variable_definitions),
registry: self.registry.clone(),
data: self.data,
fragments: &fragments,
};
return GQLOutputValue::resolve(self.query, &ctx).await;
}
}
Definition::Operation(OperationDefinition::Mutation(mutation)) => {
if self.operation_name.is_none()
|| self.operation_name == mutation.name.as_ref().map(|s| s.as_str())
{
let ctx = ContextBase {
item: &mutation.selection_set,
variables: self.variables.as_deref(),
variable_definitions: Some(&mutation.variable_definitions),
registry: self.registry.clone(),
data: self.data,
fragments: &fragments,
};
return GQLOutputValue::resolve(self.mutation, &ctx).await;
}
}
_ => {}
}
}
if let Some(operation_name) = self.operation_name {
anyhow::bail!(QueryError::UnknownOperationNamed {
name: operation_name.to_string()
});
}
Ok(serde_json::Value::Null)
self.prepare()?.execute().await
}
}
/// A parsed and validated query, ready to execute.
///
/// Produced by `QueryBuilder::prepare`; borrows the registry and context
/// data for `'a` while owning the selected operation's selection set,
/// variables, variable definitions, and fragment definitions.
pub struct PreparedQuery<'a, Query, Mutation> {
    // Which root object (query or mutation) the operation resolves against.
    root: Root<'a, Query, Mutation>,
    registry: &'a Registry,
    variables: Variables,
    data: &'a Data,
    fragments: HashMap<String, FragmentDefinition>,
    selection_set: SelectionSet,
    variable_definitions: Option<Vec<VariableDefinition>>,
}
impl<'a, Query, Mutation> PreparedQuery<'a, Query, Mutation> {
    /// Detects whether any parameter contains the Upload type
    ///
    /// Used by the multipart request handler to reject queries that do not
    /// declare at least one `Upload` variable.
    pub fn is_upload(&self) -> bool {
        if let Some(variable_definitions) = &self.variable_definitions {
            for d in variable_definitions {
                // Unwraps list/non-null wrappers down to the named base type.
                if let Some(ty) = self.registry.basic_type_by_parsed_type(&d.var_type) {
                    if ty.name() == "Upload" {
                        return true;
                    }
                }
            }
        }
        false
    }

    /// Set upload files
    ///
    /// Forwards to `Variables::set_upload`, binding the encoded file content
    /// to the variable addressed by `var_path`.
    pub fn set_upload(
        &mut self,
        var_path: &str,
        filename: &str,
        content_type: Option<&str>,
        content: Vec<u8>,
    ) {
        self.variables
            .set_upload(var_path, filename, content_type, content);
    }

    /// Execute the query.
    ///
    /// Resolves the prepared selection set against the query or mutation
    /// root and returns the JSON result.
    pub async fn execute(self) -> Result<serde_json::Value>
    where
        Query: GQLObject + Send + Sync,
        Mutation: GQLObject + Send + Sync,
    {
        let ctx = ContextBase {
            item: &self.selection_set,
            variables: &self.variables,
            variable_definitions: self.variable_definitions.as_deref(),
            registry: self.registry.clone(),
            data: self.data,
            fragments: &self.fragments,
        };
        match self.root {
            Root::Query(query) => return GQLOutputValue::resolve(query, &ctx).await,
            Root::Mutation(mutation) => return GQLOutputValue::resolve(mutation, &ctx).await,
        }
    }
}

View File

@ -3,7 +3,9 @@ mod r#enum;
mod list;
mod optional;
mod query_root;
mod upload;
pub use empty_mutation::GQLEmptyMutation;
pub use query_root::QueryRoot;
pub use r#enum::{GQLEnum, GQLEnumItem};
pub use upload::Upload;

58
src/types/upload.rs Normal file
View File

@ -0,0 +1,58 @@
use crate::{registry, GQLInputValue, GQLType, Value};
use std::borrow::Cow;
/// Upload file type
///
/// Reference: https://github.com/jaydenseric/graphql-multipart-request-spec
pub struct Upload {
    // Original filename from the multipart Content-Disposition header.
    pub filename: String,
    // MIME type of the uploaded part, when one was supplied.
    pub content_type: Option<String>,
    // Raw file contents.
    pub content: Vec<u8>,
}
// Note: the spurious unused lifetime parameter `<'a>` on this impl was removed.
impl GQLType for Upload {
    fn type_name() -> Cow<'static, str> {
        Cow::Borrowed("Upload")
    }

    /// Register `Upload` as a scalar type in the schema registry.
    fn create_type_info(registry: &mut registry::Registry) -> String {
        registry.create_type::<Self, _>(|_| registry::Type::Scalar {
            name: Self::type_name().to_string(),
            description: None,
            // A valid upload value is the encoded string produced by the
            // multipart handler, which always starts with "file:".
            is_valid: |value| match value {
                Value::String(s) => s.starts_with("file:"),
                _ => false,
            },
        })
    }
}
// Note: the spurious unused lifetime parameter `<'a>` on this impl was removed,
// and the single-character `find` patterns now use chars instead of strings.
impl GQLInputValue for Upload {
    /// Decode the `file:<name>[:<content_type>]|<content>` string encoding
    /// produced by the multipart handler back into an `Upload`.
    ///
    /// Returns `None` for any value that is not a string in that format.
    fn parse(value: &Value) -> Option<Self> {
        if let Value::String(s) = value {
            if s.starts_with("file:") {
                let s = &s[5..];
                // Split "<name>[:<type>]" from the raw content at the first '|'.
                if let Some(idx) = s.find('|') {
                    let name_and_type = &s[..idx];
                    let content = &s[idx + 1..];
                    if let Some(type_idx) = name_and_type.find(':') {
                        let name = &name_and_type[..type_idx];
                        let mime_type = &name_and_type[type_idx + 1..];
                        return Some(Self {
                            filename: name.to_string(),
                            content_type: Some(mime_type.to_string()),
                            content: content.as_bytes().to_vec(),
                        });
                    } else {
                        return Some(Self {
                            filename: name_and_type.to_string(),
                            content_type: None,
                            content: content.as_bytes().to_vec(),
                        });
                    }
                }
            }
        }
        None
    }
}

View File

@ -35,7 +35,8 @@ pub fn check_rules(registry: &Registry, doc: &Document) -> Result<()> {
.with(rules::PossibleFragmentSpreads::default())
.with(rules::ProvidedNonNullArguments)
.with(rules::KnownDirectives::default())
.with(rules::OverlappingFieldsCanBeMerged);
.with(rules::OverlappingFieldsCanBeMerged)
.with(rules::UploadFile);
visit(&mut visitor, &mut ctx, doc);
if !ctx.errors.is_empty() {

View File

@ -19,6 +19,7 @@ mod unique_argument_names;
mod unique_fragment_names;
mod unique_operation_names;
mod unique_variable_names;
mod upload_file;
mod variables_are_input_types;
mod variables_in_allowed_position;
@ -43,5 +44,6 @@ pub use unique_argument_names::UniqueArgumentNames;
pub use unique_fragment_names::UniqueFragmentNames;
pub use unique_operation_names::UniqueOperationNames;
pub use unique_variable_names::UniqueVariableNames;
pub use upload_file::UploadFile;
pub use variables_are_input_types::VariablesAreInputTypes;
pub use variables_in_allowed_position::VariableInAllowedPosition;

View File

@ -9,7 +9,7 @@ impl<'a> Visitor<'a> for ScalarLeafs {
fn enter_field(&mut self, ctx: &mut ValidatorContext<'a>, field: &'a Field) {
if let Some(ty) = ctx.parent_type() {
if let Some(schema_field) = ty.field_by_name(&field.name) {
if let Some(ty) = ctx.registry.get_basic_type(&schema_field.ty) {
if let Some(ty) = ctx.registry.basic_type_by_typename(&schema_field.ty) {
if ty.is_leaf() && !field.selection_set.items.is_empty() {
ctx.report_error(vec![field.position], format!(
"Field \"{}\" must not have a selection since type \"{}\" has no subfields",

View File

@ -0,0 +1,30 @@
use crate::validation::context::ValidatorContext;
use crate::validation::visitor::Visitor;
use graphql_parser::query::OperationDefinition;
/// Validation rule: the `Upload` scalar may only appear in mutation
/// variable definitions, never in query operations.
#[derive(Default)]
pub struct UploadFile;
impl<'a> Visitor<'a> for UploadFile {
    /// Report an error for every `Upload`-typed variable declared on a
    /// query operation; uploads are only legal on mutations.
    fn enter_operation_definition(
        &mut self,
        ctx: &mut ValidatorContext<'a>,
        operation_definition: &'a OperationDefinition,
    ) {
        // Only query operations are checked; everything else passes through.
        let query = match operation_definition {
            OperationDefinition::Query(query) => query,
            _ => return,
        };
        for var in &query.variable_definitions {
            let ty = match ctx.registry.basic_type_by_parsed_type(&var.var_type) {
                Some(ty) => ty,
                None => continue,
            };
            if ty.name() == "Upload" {
                ctx.report_error(
                    vec![var.position],
                    "The Upload type is only allowed to be defined on a mutation",
                );
            }
        }
    }
}

View File

@ -13,7 +13,7 @@ impl<'a> Visitor<'a> for VariablesAreInputTypes {
) {
if let Some(ty) = ctx
.registry
.get_basic_type(&variable_definition.var_type.to_string())
.basic_type_by_parsed_type(&variable_definition.var_type)
{
if !ty.is_input() {
ctx.report_error(

View File

@ -404,7 +404,9 @@ fn visit_selection<'a, V: Visitor<'a>>(
Selection::Field(field) => {
if let Some(schema_field) = ctx.current_type().field_by_name(&field.name) {
ctx.with_type(
ctx.registry.get_basic_type(&schema_field.ty).unwrap(),
ctx.registry
.basic_type_by_typename(&schema_field.ty)
.unwrap(),
|ctx| {
visit_field(v, ctx, field);
},