Skip to content

Commit

Permalink
Return page info even for limit/offset-based pagination
Browse files Browse the repository at this point in the history
  • Loading branch information
JunichiSugiura committed Nov 19, 2023
1 parent d4078a5 commit 74fce55
Show file tree
Hide file tree
Showing 10 changed files with 131 additions and 113 deletions.
4 changes: 2 additions & 2 deletions crates/torii/graphql/src/object/connection/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ impl ConnectionObject {
(Name::new("total_count"), TypeData::Simple(TypeRef::named_nn(TypeRef::INT))),
(
Name::new("page_info"),
TypeData::Nested((TypeRef::named(PAGE_INFO_TYPE_NAME), IndexMap::new())),
TypeData::Nested((TypeRef::named_nn(PAGE_INFO_TYPE_NAME), IndexMap::new())),
),
]);

Expand Down Expand Up @@ -117,7 +117,7 @@ pub fn connection_output(
id_column: &str,
total_count: i64,
is_external: bool,
page_info: Option<PageInfo>,
page_info: PageInfo,
) -> sqlx::Result<ValueMapping> {
let model_edges = data
.iter()
Expand Down
41 changes: 19 additions & 22 deletions crates/torii/graphql/src/object/connection/page_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,27 +31,24 @@ impl ObjectTrait for PageInfoObject {
}

impl PageInfoObject {
pub fn value(page_info: Option<PageInfo>) -> Value {
match page_info {
Some(page_info) => Value::Object(IndexMap::from([
(Name::new("has_previous_page"), Value::from(page_info.has_previous_page)),
(Name::new("has_next_page"), Value::from(page_info.has_next_page)),
(
Name::new("start_cursor"),
match page_info.start_cursor {
Some(val) => Value::from(val),
None => Value::Null,
},
),
(
Name::new("end_cursor"),
match page_info.end_cursor {
Some(val) => Value::from(val),
None => Value::Null,
},
),
])),
None => Value::Null,
}
pub fn value(page_info: PageInfo) -> Value {
Value::Object(IndexMap::from([
(Name::new("has_previous_page"), Value::from(page_info.has_previous_page)),
(Name::new("has_next_page"), Value::from(page_info.has_next_page)),
(
Name::new("start_cursor"),
match page_info.start_cursor {
Some(val) => Value::from(val),
None => Value::Null,
},
),
(
Name::new("end_cursor"),
match page_info.end_cursor {
Some(val) => Value::from(val),
None => Value::Null,
},
),
]))
}
}
13 changes: 8 additions & 5 deletions crates/torii/graphql/src/object/entity.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ impl ObjectTrait for EntityObject {
&None,
&None,
&connection,
total_count,
)
.await?;
let results = connection_output(
Expand All @@ -84,8 +85,10 @@ impl ObjectTrait for EntityObject {
}

fn subscriptions(&self) -> Option<Vec<SubscriptionField>> {
Some(vec![
SubscriptionField::new("entityUpdated", TypeRef::named_nn(self.type_name()), |ctx| {
Some(vec![SubscriptionField::new(
"entityUpdated",
TypeRef::named_nn(self.type_name()),
|ctx| {
SubscriptionFieldFuture::new(async move {
let id = match ctx.args.get("id") {
Some(id) => Some(id.string()?.to_string()),
Expand All @@ -102,9 +105,9 @@ impl ObjectTrait for EntityObject {
}
}))
})
})
.argument(InputValue::new("id", TypeRef::named(TypeRef::ID))),
])
},
)
.argument(InputValue::new("id", TypeRef::named(TypeRef::ID)))])
}
}

Expand Down
13 changes: 8 additions & 5 deletions crates/torii/graphql/src/object/event.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ impl ObjectTrait for EventObject {
&None,
&None,
&connection,
total_count,
)
.await?;
let results = connection_output(
Expand All @@ -81,15 +82,17 @@ impl ObjectTrait for EventObject {
}

fn subscriptions(&self) -> Option<Vec<SubscriptionField>> {
Some(vec![
SubscriptionField::new("eventEmitted", TypeRef::named_nn(self.type_name()), |ctx| {
Some(vec![SubscriptionField::new(
"eventEmitted",
TypeRef::named_nn(self.type_name()),
|ctx| {
SubscriptionFieldFuture::new(async move {
let input_keys = parse_keys_argument(&ctx)?;
Ok(EventObject::subscription_stream(input_keys))
})
})
.argument(InputValue::new("keys", TypeRef::named_list(TypeRef::STRING))),
])
},
)
.argument(InputValue::new("keys", TypeRef::named_list(TypeRef::STRING)))])
}
}

Expand Down
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/metadata/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ impl ObjectTrait for MetadataObject {
&None,
&None,
&connection,
total_count,
)
.await?;

Expand Down
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@ pub trait ObjectTrait: Send + Sync {
&None,
&None,
&connection,
total_count,
)
.await?;
let results = connection_output(
Expand Down
1 change: 1 addition & 0 deletions crates/torii/graphql/src/object/model_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ impl ObjectTrait for ModelDataObject {
&order,
&filters,
&connection,
total_count,
)
.await?;
let connection = connection_output(
Expand Down
104 changes: 55 additions & 49 deletions crates/torii/graphql/src/query/data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ pub async fn fetch_multiple_rows(
order: &Option<Order>,
filters: &Option<Vec<Filter>>,
connection: &ConnectionArguments,
) -> Result<(Vec<SqliteRow>, Option<PageInfo>)> {
total_count: i64,
) -> Result<(Vec<SqliteRow>, PageInfo)> {
let mut conditions = build_conditions(keys, filters);

let mut cursor_param = &connection.after;
Expand Down Expand Up @@ -104,65 +105,70 @@ pub async fn fetch_multiple_rows(
}

let mut data = sqlx::query(&query).fetch_all(conn).await?;
let mut page_info = PageInfo {
has_previous_page: false,
has_next_page: false,
start_cursor: None,
end_cursor: None,
};

if data.is_empty() {
Ok((data, page_info))
} else if !is_cursor_based {
let offset = connection.offset.unwrap_or(0);
if 1 < offset && offset < total_count as u64 {
page_info.has_previous_page = true;
}
if limit + offset < total_count as u64 {
page_info.has_next_page = true;
}

if !is_cursor_based {
Ok((data, None))
Ok((data, page_info))
} else {
let mut page_info = PageInfo {
has_previous_page: false,
has_next_page: false,
start_cursor: None,
end_cursor: None,
let order_field = match order {
Some(order) => format!("external_{}", order.field),
None => id_column.to_string(),
};

if data.is_empty() {
Ok((data, Some(page_info)))
} else {
let order_field = match order {
Some(order) => format!("external_{}", order.field),
None => id_column.to_string(),
};
match cursor_param {
Some(cursor_query) => {
let first_cursor = cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
);

match cursor_param {
Some(cursor_query) => {
let first_cursor = cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
);

if &first_cursor == cursor_query && data.len() != 1 {
data.remove(0);
page_info.has_previous_page = true;
} else {
data.pop();
}

if data.len() as u64 == limit - 1 {
page_info.has_next_page = true;
data.pop();
}
if &first_cursor == cursor_query && data.len() != 1 {
data.remove(0);
page_info.has_previous_page = true;
} else {
data.pop();
}
None => {
if data.len() as u64 == limit {
page_info.has_next_page = true;
data.pop();
}

if data.len() as u64 == limit - 1 {
page_info.has_next_page = true;
data.pop();
}
}

if !data.is_empty() {
page_info.start_cursor = Some(cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
));
page_info.end_cursor = Some(cursor::encode(
&data[data.len() - 1].try_get::<String, &str>(id_column)?,
&data[data.len() - 1].try_get_unchecked::<String, &str>(&order_field)?,
));
None => {
if data.len() as u64 == limit {
page_info.has_next_page = true;
data.pop();
}
}
}

Ok((data, Some(page_info)))
if !data.is_empty() {
page_info.start_cursor = Some(cursor::encode(
&data[0].try_get::<String, &str>(id_column)?,
&data[0].try_get_unchecked::<String, &str>(&order_field)?,
));
page_info.end_cursor = Some(cursor::encode(
&data[data.len() - 1].try_get::<String, &str>(id_column)?,
&data[data.len() - 1].try_get_unchecked::<String, &str>(&order_field)?,
));
}

Ok((data, page_info))
}
}

Expand Down
63 changes: 35 additions & 28 deletions crates/torii/graphql/src/tests/entities_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -137,11 +137,10 @@ mod tests {
assert_eq!(connection.edges.first().unwrap(), three);
assert_eq!(connection.edges.last().unwrap(), four);

let page_info = connection.page_info.take().unwrap();
assert_eq!(page_info.has_previous_page, true);
assert_eq!(page_info.has_next_page, true);
assert_eq!(page_info.start_cursor.unwrap(), three.cursor);
assert_eq!(page_info.end_cursor.unwrap(), four.cursor);
assert_eq!(connection.page_info.has_previous_page, true);
assert_eq!(connection.page_info.has_next_page, true);
assert_eq!(connection.page_info.start_cursor.unwrap(), three.cursor);
assert_eq!(connection.page_info.end_cursor.unwrap(), four.cursor);

let entities =
entities_query(&schema, &format!("(first: 3, after: \"{}\")", three.cursor)).await;
Expand All @@ -150,11 +149,10 @@ mod tests {
assert_eq!(connection.edges.first().unwrap(), four);
assert_eq!(connection.edges.last().unwrap(), six);

let page_info = connection.page_info.take().unwrap();
assert_eq!(page_info.has_previous_page, true);
assert_eq!(page_info.has_next_page, true);
assert_eq!(page_info.start_cursor.unwrap(), four.cursor);
assert_eq!(page_info.end_cursor.unwrap(), six.cursor);
assert_eq!(connection.page_info.has_previous_page, true);
assert_eq!(connection.page_info.has_next_page, true);
assert_eq!(connection.page_info.start_cursor.unwrap(), four.cursor);
assert_eq!(connection.page_info.end_cursor.unwrap(), six.cursor);

// cursor based backward pagination
let entities =
Expand All @@ -164,11 +162,10 @@ mod tests {
assert_eq!(connection.edges.first().unwrap(), six);
assert_eq!(connection.edges.last().unwrap(), five);

let page_info = connection.page_info.take().unwrap();
assert_eq!(page_info.has_previous_page, true);
assert_eq!(page_info.has_next_page, true);
assert_eq!(page_info.start_cursor.unwrap(), six.cursor);
assert_eq!(page_info.end_cursor.unwrap(), five.cursor);
assert_eq!(connection.page_info.has_previous_page, true);
assert_eq!(connection.page_info.has_next_page, true);
assert_eq!(connection.page_info.start_cursor.unwrap(), six.cursor);
assert_eq!(connection.page_info.end_cursor.unwrap(), five.cursor);

let entities =
entities_query(&schema, &format!("(last: 3, before: \"{}\")", six.cursor)).await;
Expand All @@ -177,11 +174,10 @@ mod tests {
assert_eq!(connection.edges.first().unwrap(), five);
assert_eq!(connection.edges.last().unwrap(), three);

let page_info = connection.page_info.take().unwrap();
assert_eq!(page_info.has_previous_page, true);
assert_eq!(page_info.has_next_page, true);
assert_eq!(page_info.start_cursor.unwrap(), five.cursor);
assert_eq!(page_info.end_cursor.unwrap(), three.cursor);
assert_eq!(connection.page_info.has_previous_page, true);
assert_eq!(connection.page_info.has_next_page, true);
assert_eq!(connection.page_info.start_cursor.unwrap(), five.cursor);
assert_eq!(connection.page_info.end_cursor.unwrap(), three.cursor);

let empty_entities = entities_query(
&schema,
Expand All @@ -194,31 +190,42 @@ mod tests {
let connection: Connection<Entity> = serde_json::from_value(empty_entities).unwrap();
assert_eq!(connection.edges.len(), 0);

let page_info = connection.page_info.take().unwrap();
assert_eq!(page_info.has_previous_page, false);
assert_eq!(page_info.has_next_page, false);
assert_eq!(page_info.start_cursor, None);
assert_eq!(page_info.end_cursor, None);
assert_eq!(connection.page_info.has_previous_page, false);
assert_eq!(connection.page_info.has_next_page, false);
assert_eq!(connection.page_info.start_cursor, None);
assert_eq!(connection.page_info.end_cursor, None);

// offset/limit based pagination
let entities = entities_query(&schema, "(limit: 2)").await;
let connection: Connection<Entity> = serde_json::from_value(entities).unwrap();
assert_eq!(connection.edges.len(), 2);
assert_eq!(connection.edges.first().unwrap(), one);
assert_eq!(connection.edges.last().unwrap(), two);
assert!(connection.page_info.is_null());

assert_eq!(connection.page_info.has_previous_page, false);
assert_eq!(connection.page_info.has_next_page, true);
assert_eq!(connection.page_info.start_cursor, None);
assert_eq!(connection.page_info.end_cursor, None);

let entities = entities_query(&schema, "(limit: 3, offset: 2)").await;
let connection: Connection<Entity> = serde_json::from_value(entities).unwrap();
assert_eq!(connection.edges.len(), 3);
assert_eq!(connection.edges.first().unwrap(), three);
assert_eq!(connection.edges.last().unwrap(), five);
assert!(connection.page_info.is_null());

assert_eq!(connection.page_info.has_previous_page, true);
assert_eq!(connection.page_info.has_next_page, true);
assert_eq!(connection.page_info.start_cursor, None);
assert_eq!(connection.page_info.end_cursor, None);

let empty_entities = entities_query(&schema, "(limit: 1, offset: 20)").await;
let connection: Connection<Entity> = serde_json::from_value(empty_entities).unwrap();
assert_eq!(connection.edges.len(), 0);
assert!(connection.page_info.is_null());

assert_eq!(connection.page_info.has_previous_page, false);
assert_eq!(connection.page_info.has_next_page, false);
assert_eq!(connection.page_info.start_cursor, None);
assert_eq!(connection.page_info.end_cursor, None);

// entity model union
let id = poseidon_hash_many(&[FieldElement::ZERO]);
Expand Down
Loading

0 comments on commit 74fce55

Please sign in to comment.