Skip to content

Commit

Permalink
Return page info even for limit/offset-based pagination
Browse files Browse the repository at this point in the history
  • Loading branch information
JunichiSugiura committed Nov 19, 2023
1 parent d4078a5 commit fc4f0b5
Show file tree
Hide file tree
Showing 12 changed files with 138 additions and 111 deletions.
4 changes: 2 additions & 2 deletions crates/torii/graphql/src/object/connection/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ impl ConnectionObject {
(Name::new("total_count"), TypeData::Simple(TypeRef::named_nn(TypeRef::INT))),
(
Name::new("page_info"),
TypeData::Nested((TypeRef::named(PAGE_INFO_TYPE_NAME), IndexMap::new())),
TypeData::Nested((TypeRef::named_nn(PAGE_INFO_TYPE_NAME), IndexMap::new())),
),
]);

Expand Down Expand Up @@ -117,7 +117,7 @@ pub fn connection_output(
id_column: &str,
total_count: i64,
is_external: bool,
page_info: Option<PageInfo>,
page_info: PageInfo,
) -> sqlx::Result<ValueMapping> {
let model_edges = data
.iter()
Expand Down
41 changes: 19 additions & 22 deletions crates/torii/graphql/src/object/connection/page_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,27 +31,24 @@ impl ObjectTrait for PageInfoObject {
}

impl PageInfoObject {
pub fn value(page_info: Option<PageInfo>) -> Value {
match page_info {
Some(page_info) => Value::Object(IndexMap::from([
(Name::new("has_previous_page"), Value::from(page_info.has_previous_page)),
(Name::new("has_next_page"), Value::from(page_info.has_next_page)),
(
Name::new("start_cursor"),
match page_info.start_cursor {
Some(val) => Value::from(val),
None => Value::Null,
},
),
(
Name::new("end_cursor"),
match page_info.end_cursor {
Some(val) => Value::from(val),
None => Value::Null,
},
),
])),
None => Value::Null,
}
/// Converts a `PageInfo` into a GraphQL `Value::Object` carrying the four
/// standard Relay-connection pagination fields (`has_previous_page`,
/// `has_next_page`, `start_cursor`, `end_cursor`).
pub fn value(page_info: PageInfo) -> Value {
    // Translate an optional cursor string into a GraphQL value,
    // falling back to Null when the cursor is absent.
    let cursor_value = |cursor: Option<String>| cursor.map_or(Value::Null, Value::from);

    // Insertion order matters for the rendered object: it must match the
    // field order exposed by the PageInfo GraphQL type.
    let mut fields = IndexMap::new();
    fields.insert(Name::new("has_previous_page"), Value::from(page_info.has_previous_page));
    fields.insert(Name::new("has_next_page"), Value::from(page_info.has_next_page));
    fields.insert(Name::new("start_cursor"), cursor_value(page_info.start_cursor));
    fields.insert(Name::new("end_cursor"), cursor_value(page_info.end_cursor));

    Value::Object(fields)
}
}
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/entity.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ impl ObjectTrait for EntityObject {
&None,
&None,
&connection,
total_count,
)
.await?;
let results = connection_output(
Expand Down
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/event.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ impl ObjectTrait for EventObject {
&None,
&None,
&connection,
total_count,
)
.await?;
let results = connection_output(
Expand Down
17 changes: 12 additions & 5 deletions crates/torii/graphql/src/object/metadata/mod.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
use async_graphql::connection::PageInfo;
use async_graphql::dynamic::{Field, FieldFuture, TypeRef};
use async_graphql::{Name, Value};
use sqlx::sqlite::SqliteRow;
use sqlx::{Pool, Row, Sqlite};

use super::connection::page_info::PageInfoObject;
use super::connection::{connection_arguments, cursor, parse_connection_arguments};
use super::ObjectTrait;
use crate::constants::{
Expand Down Expand Up @@ -54,19 +56,21 @@ impl ObjectTrait for MetadataObject {
let mut conn = ctx.data::<Pool<Sqlite>>()?.acquire().await?;
let connection = parse_connection_arguments(&ctx)?;
let total_count = count_rows(&mut conn, &table_name, &None, &None).await?;
let (data, _page_info) = fetch_multiple_rows(
let (data, page_info) = fetch_multiple_rows(
&mut conn,
&table_name,
ID_COLUMN,
&None,
&None,
&None,
&connection,
total_count,
)
.await?;

// convert json field to value_mapping expected by content object
let results = metadata_connection_output(&data, &type_mapping, total_count)?;
let results =
metadata_connection_output(&data, &type_mapping, total_count, page_info)?;

Ok(Some(Value::Object(results)))
})
Expand All @@ -85,6 +89,7 @@ fn metadata_connection_output(
data: &[SqliteRow],
types: &TypeMapping,
total_count: i64,
page_info: PageInfo,
) -> sqlx::Result<ValueMapping> {
let edges = data
.iter()
Expand All @@ -107,9 +112,10 @@ fn metadata_connection_output(

value_mapping.insert(Name::new("content"), Value::Object(content));

let mut edge = ValueMapping::new();
edge.insert(Name::new("node"), Value::Object(value_mapping));
edge.insert(Name::new("cursor"), Value::String(cursor));
let edge = ValueMapping::from([
(Name::new("node"), Value::Object(value_mapping)),
(Name::new("cursor"), Value::String(cursor)),
]);

Ok(Value::Object(edge))
})
Expand All @@ -118,6 +124,7 @@ fn metadata_connection_output(
Ok(ValueMapping::from([
(Name::new("total_count"), Value::from(total_count)),
(Name::new("edges"), Value::List(edges?)),
(Name::new("page_info"), PageInfoObject::value(page_info)),
]))
}

Expand Down
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@ pub trait ObjectTrait: Send + Sync {
&None,
&None,
&connection,
total_count,
)
.await?;
let results = connection_output(
Expand Down
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/model_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ impl ObjectTrait for ModelDataObject {
&order,
&filters,
&connection,
total_count,
)
.await?;
let connection = connection_output(
Expand Down
105 changes: 56 additions & 49 deletions crates/torii/graphql/src/query/data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ pub async fn fetch_single_row(
sqlx::query(&query).fetch_one(conn).await
}

#[allow(clippy::too_many_arguments)]
pub async fn fetch_multiple_rows(
conn: &mut PoolConnection<Sqlite>,
table_name: &str,
Expand All @@ -43,7 +44,8 @@ pub async fn fetch_multiple_rows(
order: &Option<Order>,
filters: &Option<Vec<Filter>>,
connection: &ConnectionArguments,
) -> Result<(Vec<SqliteRow>, Option<PageInfo>)> {
total_count: i64,
) -> Result<(Vec<SqliteRow>, PageInfo)> {
let mut conditions = build_conditions(keys, filters);

let mut cursor_param = &connection.after;
Expand Down Expand Up @@ -104,65 +106,70 @@ pub async fn fetch_multiple_rows(
}

let mut data = sqlx::query(&query).fetch_all(conn).await?;
let mut page_info = PageInfo {
has_previous_page: false,
has_next_page: false,
start_cursor: None,
end_cursor: None,
};

if data.is_empty() {
Ok((data, page_info))
} else if !is_cursor_based {
let offset = connection.offset.unwrap_or(0);
if 1 < offset && offset < total_count as u64 {
page_info.has_previous_page = true;
}
if limit + offset < total_count as u64 {
page_info.has_next_page = true;
}

if !is_cursor_based {
Ok((data, None))
Ok((data, page_info))
} else {
let mut page_info = PageInfo {
has_previous_page: false,
has_next_page: false,
start_cursor: None,
end_cursor: None,
let order_field = match order {
Some(order) => format!("external_{}", order.field),
None => id_column.to_string(),
};

if data.is_empty() {
Ok((data, Some(page_info)))
} else {
let order_field = match order {
Some(order) => format!("external_{}", order.field),
None => id_column.to_string(),
};
match cursor_param {
Some(cursor_query) => {
let first_cursor = cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
);

match cursor_param {
Some(cursor_query) => {
let first_cursor = cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
);

if &first_cursor == cursor_query && data.len() != 1 {
data.remove(0);
page_info.has_previous_page = true;
} else {
data.pop();
}

if data.len() as u64 == limit - 1 {
page_info.has_next_page = true;
data.pop();
}
if &first_cursor == cursor_query && data.len() != 1 {
data.remove(0);
page_info.has_previous_page = true;
} else {
data.pop();
}
None => {
if data.len() as u64 == limit {
page_info.has_next_page = true;
data.pop();
}

if data.len() as u64 == limit - 1 {
page_info.has_next_page = true;
data.pop();
}
}

if !data.is_empty() {
page_info.start_cursor = Some(cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
));
page_info.end_cursor = Some(cursor::encode(
&data[data.len() - 1].try_get::<String, &str>(id_column)?,
&data[data.len() - 1].try_get_unchecked::<String, &str>(&order_field)?,
));
None => {
if data.len() as u64 == limit {
page_info.has_next_page = true;
data.pop();
}
}
}

Ok((data, Some(page_info)))
if !data.is_empty() {
page_info.start_cursor = Some(cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
));
page_info.end_cursor = Some(cursor::encode(
&data[data.len() - 1].try_get::<String, &str>(id_column)?,
&data[data.len() - 1].try_get_unchecked::<String, &str>(&order_field)?,
));
}

Ok((data, page_info))
}
}

Expand Down
Loading

0 comments on commit fc4f0b5

Please sign in to comment.