Relay pagination algorithm

Blaine Bublitz 2020-05-05 00:22:01 -07:00
parent 43cf45317d
commit e696061666
2 changed files with 337 additions and 65 deletions

View File

@@ -10,29 +10,98 @@ pub use connection_type::Connection;
 pub use cursor::Cursor;
 /// Connection query operation
-pub enum QueryOperation<'a> {
-    /// Forward query
-    Forward {
-        /// After this cursor
-        after: Option<&'a str>,
-        /// How many records did this query return
-        limit: usize,
-    },
-    /// Backward query
-    Backward {
-        /// Before this cursor
-        before: Option<&'a str>,
-        /// How many records did this query return
-        limit: usize,
-    },
-}
+pub enum QueryOperation {
+    /// Return all results
+    None,
+    /// Return all results after the cursor
+    After {
+        /// After this cursor
+        after: Cursor,
+    },
+    /// Return all results before the cursor
+    Before {
+        /// Before this cursor
+        before: Cursor,
+    },
+    /// Return all results between the cursors
+    Between {
+        /// After this cursor
+        after: Cursor,
+        /// But before this cursor
+        before: Cursor,
+    },
+    /// Return the amount of results specified by `limit`, starting from the beginning
+    First {
+        /// The maximum amount of results to return
+        limit: usize,
+    },
+    /// Return the amount of results specified by `limit`, starting after the cursor
+    FirstAfter {
+        /// The maximum amount of results to return
+        limit: usize,
+        /// After this cursor
+        after: Cursor,
+    },
+    /// Return the amount of results specified by `limit`, starting from the beginning but ending before the cursor
+    FirstBefore {
+        /// The maximum amount of results to return
+        limit: usize,
+        /// Before this cursor
+        before: Cursor,
+    },
+    /// Return the amount of results specified by `limit`, but between the cursors. Limit includes beginning results.
+    FirstBetween {
+        /// The maximum amount of results to return
+        limit: usize,
+        /// After this cursor
+        after: Cursor,
+        /// But before this cursor
+        before: Cursor,
+    },
+    /// Return the amount of results specified by `limit`, but before the end
+    Last {
+        /// The maximum amount of results to return
+        limit: usize,
+    },
+    /// Return the amount of results specified by `limit`, but before the end. Must not include anything before the cursor.
+    LastAfter {
+        /// The maximum amount of results to return
+        limit: usize,
+        /// After this cursor
+        after: Cursor,
+    },
+    /// Return the amount of results specified by `limit`, but before the cursor
+    LastBefore {
+        /// The maximum amount of results to return
+        limit: usize,
+        /// Before this cursor
+        before: Cursor,
+    },
+    /// Return the amount of results specified by `limit`, but between the cursors. Limit includes ending results.
+    LastBetween {
+        /// The maximum amount of results to return
+        limit: usize,
+        /// After this cursor
+        after: Cursor,
+        /// But before this cursor
+        before: Cursor,
+    },
+    /// An invalid query was made. For example: sending `first` and `last` in the same query
+    Invalid,
+}
 /// Empty edge extension object
 #[async_graphql_derive::SimpleObject(internal)]
 pub struct EmptyEdgeFields;
+// Temporary struct to store values for pattern matching
+struct Pagination {
+    after: Option<Cursor>,
+    before: Option<Cursor>,
+    first: Option<i32>,
+    last: Option<i32>,
+}
 /// Data source of GraphQL Cursor Connections type
 ///
 /// `Edge` is an extension object type that extends the edge fields. If you don't need it, you can use `EmptyEdgeFields`.
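Each variant corresponds to one combination of the four Relay connection arguments (`first`, `last`, `after`, `before`); the authoritative mapping is the `Pagination` match added to `DataSource::query` further down. For orientation, here is a minimal sketch of a few representative mappings (the `use` path is an assumption; adjust it to wherever the crate re-exports these types):

    // Sketch only: which variant a few Relay argument combinations select.
    // The full mapping lives in the `query` default method below.
    use async_graphql::{Cursor, QueryOperation}; // re-export path assumed

    fn operation_for(
        first: Option<i32>,
        last: Option<i32>,
        after: Option<Cursor>,
        before: Option<Cursor>,
    ) -> QueryOperation {
        match (first, last, after, before) {
            // `first` and `last` together: discouraged by the Relay spec
            (Some(_), Some(_), _, _) => QueryOperation::Invalid,
            // no arguments at all: return everything
            (None, None, None, None) => QueryOperation::None,
            // first: n
            (Some(n), None, None, None) => QueryOperation::First {
                limit: n.max(0) as usize,
            },
            // first: n, after: "<cursor>"
            (Some(n), None, Some(after), None) => QueryOperation::FirstAfter {
                limit: n.max(0) as usize,
                after,
            },
            // last: n, before: "<cursor>"
            (None, Some(n), None, Some(before)) => QueryOperation::LastBefore {
                limit: n.max(0) as usize,
                before,
            },
            // the remaining combinations map onto the other variants (see below)
            _ => todo!(),
        }
    }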
@@ -60,23 +129,37 @@ pub struct EmptyEdgeFields;
 ///     type Element = i32;
 ///     type EdgeFieldsObj = DiffFields;
 ///
-///     async fn query_operation(&self, operation: &QueryOperation<'_>) -> FieldResult<Connection<Self::Element, Self::EdgeFieldsObj>> {
+///     async fn query_operation(&self, operation: &QueryOperation) -> FieldResult<Connection<Self::Element, Self::EdgeFieldsObj>> {
 ///         let (start, end) = match operation {
-///             QueryOperation::Forward {after, limit} => {
-///                 let start = after.and_then(|after| base64::decode(after).ok())
-///                     .and_then(|data| data.as_slice().read_i32::<BE>().ok())
-///                     .map(|idx| idx + 1)
-///                     .unwrap_or(0);
-///                 let end = start + *limit as i32;
-///                 (start, end)
-///             }
-///             QueryOperation::Backward {before, limit} => {
-///                 let end = before.and_then(|before| base64::decode(before).ok())
-///                     .and_then(|data| data.as_slice().read_i32::<BE>().ok())
-///                     .unwrap_or(0);
-///                 let start = end - *limit as i32;
-///                 (start, end)
-///             }
+///             QueryOperation::First {limit} => {
+///                 let start = 0;
+///                 let end = start + *limit as i32;
+///                 (start, end)
+///             }
+///             QueryOperation::Last {limit} => {
+///                 let end = 0;
+///                 let start = end - *limit as i32;
+///                 (start, end)
+///             }
+///             QueryOperation::FirstAfter {after, limit} => {
+///                 let start = base64::decode(after.to_string())
+///                     .ok()
+///                     .and_then(|data| data.as_slice().read_i32::<BE>().ok())
+///                     .map(|idx| idx + 1)
+///                     .unwrap_or(0);
+///                 let end = start + *limit as i32;
+///                 (start, end)
+///             }
+///             QueryOperation::LastBefore {before, limit} => {
+///                 let end = base64::decode(before.to_string())
+///                     .ok()
+///                     .and_then(|data| data.as_slice().read_i32::<BE>().ok())
+///                     .unwrap_or(0);
+///                 let start = end - *limit as i32;
+///                 (start, end)
+///             }
+///             // You should handle all cases instead of using a default like this
+///             _ => (0, 10)
 ///         };
 ///
 ///         let nodes = (start..end).into_iter().map(|n| (base64::encode(n.to_be_bytes()).into(), DiffFields {diff: n - 1000}, n)).collect();
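The cursors in this example are just a base64-encoded big-endian integer index. Here is a standalone round trip of that scheme, using the same `base64` and `byteorder` calls as the example above (a sketch, not part of the commit):

    use byteorder::{ReadBytesExt, BE};

    // Encode an index the way the doc example does, then decode it back.
    fn encode_cursor(idx: i32) -> String {
        base64::encode(idx.to_be_bytes())
    }

    fn decode_cursor(cursor: &str) -> Option<i32> {
        base64::decode(cursor)
            .ok()
            .and_then(|data| data.as_slice().read_i32::<BE>().ok())
    }

    fn main() {
        let cursor = encode_cursor(42);
        assert_eq!(decode_cursor(&cursor), Some(42));
    }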
@@ -87,8 +170,8 @@ pub struct EmptyEdgeFields;
 /// #[Object]
 /// impl QueryRoot {
 ///     async fn numbers(&self, ctx: &Context<'_>,
-///         after: Option<String>,
-///         before: Option<String>,
+///         after: Option<Cursor>,
+///         before: Option<Cursor>,
 ///         first: Option<i32>,
 ///         last: Option<i32>
 ///     ) -> FieldResult<Connection<i32, DiffFields>> {
@@ -133,42 +216,122 @@ pub trait DataSource: Sync + Send {
     async fn query(
         &self,
         _ctx: &Context<'_>,
-        after: Option<String>,
-        before: Option<String>,
+        after: Option<Cursor>,
+        before: Option<Cursor>,
         first: Option<i32>,
         last: Option<i32>,
     ) -> FieldResult<Connection<Self::Element, Self::EdgeFieldsObj>> {
-        let operation = if let Some(after) = &after {
-            QueryOperation::Forward {
-                after: Some(after),
-                limit: match first {
-                    Some(value) => value.max(0) as usize,
-                    None => 10,
-                },
-            }
-        } else if let Some(before) = &before {
-            QueryOperation::Backward {
-                before: Some(before),
-                limit: match last {
-                    Some(value) => value.max(0) as usize,
-                    None => 10,
-                },
-            }
-        } else if let Some(first) = first {
-            QueryOperation::Forward {
-                after: None,
-                limit: first.max(0) as usize,
-            }
-        } else if let Some(last) = last {
-            QueryOperation::Backward {
-                before: None,
-                limit: last.max(0) as usize,
-            }
-        } else {
-            QueryOperation::Forward {
-                after: None,
-                limit: 10,
-            }
+        let pagination = Pagination {
+            first,
+            last,
+            before,
+            after,
+        };
+        let operation = match pagination {
+            // This is technically allowed according to the Relay Spec, but highly discouraged
+            Pagination {
+                first: Some(_),
+                last: Some(_),
+                before: _,
+                after: _,
+            } => QueryOperation::Invalid,
+            Pagination {
+                first: None,
+                last: None,
+                before: None,
+                after: None,
+            } => QueryOperation::None,
+            Pagination {
+                first: None,
+                last: None,
+                before: Some(before),
+                after: None,
+            } => QueryOperation::Before { before },
+            Pagination {
+                first: None,
+                last: None,
+                before: None,
+                after: Some(after),
+            } => QueryOperation::After { after },
+            Pagination {
+                first: None,
+                last: None,
+                before: Some(before),
+                after: Some(after),
+            } => QueryOperation::Between { after, before },
+            Pagination {
+                first: Some(limit),
+                last: None,
+                before: None,
+                after: None,
+            } => QueryOperation::First {
+                limit: limit.max(0) as usize,
+            },
+            Pagination {
+                first: Some(limit),
+                last: None,
+                before: Some(before),
+                after: None,
+            } => QueryOperation::FirstBefore {
+                limit: limit.max(0) as usize,
+                before,
+            },
+            Pagination {
+                first: Some(limit),
+                last: None,
+                before: None,
+                after: Some(after),
+            } => QueryOperation::FirstAfter {
+                limit: limit.max(0) as usize,
+                after,
+            },
+            Pagination {
+                first: Some(limit),
+                last: None,
+                before: Some(before),
+                after: Some(after),
+            } => QueryOperation::FirstBetween {
+                limit: limit.max(0) as usize,
+                after,
+                before,
+            },
+            Pagination {
+                first: None,
+                last: Some(limit),
+                before: None,
+                after: None,
+            } => QueryOperation::Last {
+                limit: limit.max(0) as usize,
+            },
+            Pagination {
+                first: None,
+                last: Some(limit),
+                before: Some(before),
+                after: None,
+            } => QueryOperation::LastBefore {
+                limit: limit.max(0) as usize,
+                before,
+            },
+            Pagination {
+                first: None,
+                last: Some(limit),
+                before: None,
+                after: Some(after),
+            } => QueryOperation::LastAfter {
+                limit: limit.max(0) as usize,
+                after,
+            },
+            Pagination {
+                first: None,
+                last: Some(limit),
+                before: Some(before),
+                after: Some(after),
+            } => QueryOperation::LastBetween {
+                limit: limit.max(0) as usize,
+                after,
+                before,
+            },
         };
         self.query_operation(&operation).await
@@ -177,6 +340,6 @@ pub trait DataSource: Sync + Send {
     /// Parses the parameters and executes the query. Usually you just need to implement this method.
     async fn query_operation(
         &self,
-        operation: &QueryOperation<'_>,
+        operation: &QueryOperation,
     ) -> FieldResult<Connection<Self::Element, Self::EdgeFieldsObj>>;
 }
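For a custom backend, implementing the trait now means matching on `QueryOperation` instead of hand-parsing `after`/`before`/`first`/`last`. A rough skeleton of such an implementation (a sketch, not from this commit; it assumes the `async_trait` attribute the crate applies to `DataSource`, the `Connection::new(total, has_previous, has_next, nodes)` shape used above, and crate-root re-exports for the imports):

    use async_graphql::{
        Connection, DataSource, EmptyEdgeFields, FieldResult, QueryOperation,
    }; // re-export paths assumed

    // Hypothetical backend: an in-memory list of numbers.
    struct Numbers {
        items: Vec<i32>,
    }

    #[async_trait::async_trait]
    impl DataSource for Numbers {
        type Element = i32;
        type EdgeFieldsObj = EmptyEdgeFields;

        async fn query_operation(
            &self,
            operation: &QueryOperation,
        ) -> FieldResult<Connection<Self::Element, Self::EdgeFieldsObj>> {
            // Only two variants are handled here; a real data source covers all of them.
            let (start, end) = match operation {
                QueryOperation::None => (0, self.items.len()),
                QueryOperation::First { limit } => (0, (*limit).min(self.items.len())),
                _ => (0, 0),
            };
            let nodes = self.items[start..end]
                .iter()
                .enumerate()
                .map(|(idx, n)| {
                    (
                        // Cursor encodes the absolute element index, big-endian, base64
                        base64::encode(((start + idx) as u32).to_be_bytes()).into(),
                        EmptyEdgeFields,
                        *n,
                    )
                })
                .collect();
            Ok(Connection::new(None, start > 0, end < self.items.len(), nodes))
        }
    }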

View File

@@ -9,30 +9,138 @@ impl<'a, T: Sync> DataSource for &'a [T] {
     async fn query_operation(
         &self,
-        operation: &QueryOperation<'_>,
+        operation: &QueryOperation,
     ) -> FieldResult<Connection<Self::Element, Self::EdgeFieldsObj>> {
         let (start, end) = match operation {
-            QueryOperation::Forward { after, limit } => {
-                let start = after
-                    .and_then(|after| base64::decode(after).ok())
-                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
-                    .map(|idx| (idx + 1) as usize)
-                    .unwrap_or(0);
-                let end = (start + *limit).min(self.len());
-                (start, end)
-            }
-            QueryOperation::Backward { before, limit } => {
-                let end = before
-                    .and_then(|before| base64::decode(before).ok())
-                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
-                    .map(|idx| idx as usize)
-                    .unwrap_or_else(|| self.len());
-                let start = if end < *limit { 0 } else { end - *limit };
-                (start, end)
-            }
+            QueryOperation::None => {
+                let start = 0;
+                let end = self.len();
+                (start, end)
+            }
+            QueryOperation::After { after } => {
+                let start = base64::decode(after.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| (idx + 1) as usize)
+                    .unwrap_or(0);
+                let end = self.len();
+                (start, end)
+            }
+            QueryOperation::Before { before } => {
+                let end = base64::decode(before.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| idx as usize)
+                    .unwrap_or_else(|| self.len());
+                let start = 0;
+                (start, end)
+            }
+            QueryOperation::Between { after, before } => {
+                let start = base64::decode(after.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| (idx + 1) as usize)
+                    .unwrap_or(0);
+                let end = base64::decode(before.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| idx as usize)
+                    .unwrap_or_else(|| self.len());
+                (start, end)
+            }
+            QueryOperation::First { limit } => {
+                let start = 0;
+                let end = (start + *limit).min(self.len());
+                (start, end)
+            }
+            QueryOperation::FirstAfter { after, limit } => {
+                let start = base64::decode(after.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| (idx + 1) as usize)
+                    .unwrap_or(0);
+                let end = (start + *limit).min(self.len());
+                (start, end)
+            }
+            QueryOperation::FirstBefore { before, limit } => {
+                let end_cursor = base64::decode(before.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| idx as usize)
+                    .unwrap_or_else(|| self.len());
+                let start = (end_cursor - *limit).max(0);
+                let end = (start + *limit).min(end_cursor);
+                (start, end)
+            }
+            QueryOperation::FirstBetween {
+                after,
+                before,
+                limit,
+            } => {
+                let start = base64::decode(after.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| (idx + 1) as usize)
+                    .unwrap_or(0);
+                let end_cursor = base64::decode(before.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| idx as usize)
+                    .unwrap_or_else(|| self.len());
+                let end = (start + *limit).min(end_cursor);
+                (start, end)
+            }
+            QueryOperation::Last { limit } => {
+                let end = self.len();
+                let start = (end - *limit).max(0);
+                (start, end)
+            }
+            QueryOperation::LastAfter { after, limit } => {
+                let end = self.len();
+                let start_cursor = base64::decode(after.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| (idx + 1) as usize)
+                    .unwrap_or(0);
+                let start = (end - *limit).max(start_cursor);
+                (start, end)
+            }
+            QueryOperation::LastBefore { before, limit } => {
+                let end = base64::decode(before.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| idx as usize)
+                    .unwrap_or_else(|| self.len());
+                let start = (end - *limit).max(0);
+                (start, end)
+            }
+            QueryOperation::LastBetween {
+                after,
+                before,
+                limit,
+            } => {
+                let start_cursor = base64::decode(after.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| (idx + 1) as usize)
+                    .unwrap_or(0);
+                let end = base64::decode(before.to_string())
+                    .ok()
+                    .and_then(|data| data.as_slice().read_u32::<BE>().ok())
+                    .map(|idx| idx as usize)
+                    .unwrap_or_else(|| self.len());
+                let start = (end - *limit).max(start_cursor);
+                (start, end)
+            }
+            QueryOperation::Invalid => {
+                let start = 0;
+                let end = 0;
+                (start, end)
+            }
         };
         let mut nodes = Vec::with_capacity(end - start);
+        if nodes.capacity() != 0 {
             for (idx, item) in self[start..end].iter().enumerate() {
                 nodes.push((
                     base64::encode((idx as u32).to_be_bytes()).into(),
@@ -40,6 +148,7 @@ impl<'a, T: Sync> DataSource for &'a [T] {
                     item,
                 ));
             }
+        }
         Ok(Connection::new(None, start > 0, end < self.len(), nodes))
     }
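As a concrete check of the index arithmetic in the slice implementation, take the `LastBefore { before, limit }` arm over a six-element slice where the `before` cursor decodes to index 4 and `limit` is 2. The following standalone sketch restates the arm's computation rather than calling the trait:

    fn main() {
        let len = 6usize;
        let before_idx = 4usize; // index decoded from the `before` cursor
        let limit = 2usize;

        // Same computation as the `LastBefore` arm above.
        let end = before_idx;
        let start = (end - limit).max(0); // note: `usize` subtraction underflows if limit > end
        assert_eq!((start, end), (2, 4));

        // Elements at indices 2 and 3 are returned; both page-info flags are set.
        assert!(start > 0); // has_previous_page
        assert!(end < len); // has_next_page
    }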