fix: deserializing tokens
ngutech21 committed Feb 16, 2024
1 parent 1fca7d2 commit 3c2ce7e
Showing 3 changed files with 30 additions and 6 deletions.
3 changes: 3 additions & 0 deletions moksha-core/src/error.rs
@@ -25,4 +25,7 @@ pub enum MokshaCoreError {
 
     #[error("Not enough tokens")]
     NotEnoughTokens,
+
+    #[error("Invalid token")]
+    InvalidToken,
 }
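
With the new variant, a malformed token body surfaces as a domain error instead of a raw base64 decode error. A hypothetical call site (not part of this commit; "cashuA" is the V3 token prefix):

    // '%' is not valid URL-safe base64, so decoding the body fails
    // and deserialize returns the new InvalidToken variant.
    let result = TokenV3::deserialize("cashuAnot%base64");
    assert!(matches!(result, Err(MokshaCoreError::InvalidToken)));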
1 change: 1 addition & 0 deletions moksha-core/src/fixtures/token_invalid.cashu
@@ -0,0 +1 @@
cashuAeyJ0b2tlbiI6W3sibWludCI6Imh0dHA6Ly8xMjcuMC4wLjE6MzMzOCIsInByb29mcyI6W3siYW1vdW50Ijo0LCJzZWNyZXQiOiJzR3Z3OVZwalpqNGQ0YnFFU3FvQzdwTWEiLCJDIjoiMDM3YmQ2MGY2YWE1ZTE5ZjZhOWVjMzU5MjlkOGViN2E2Yzk1Y2YyOTM5NTlmMzMzNTQzYWQ5MWIxNTkyNWU2OTE1IiwiaWQiOiJtUjlQSjNNempMMXkifSx7ImFtb3VudCI6OCwic2VjcmV0IjoiQjJqNmw4Z1VUYjIxR0hqMFRnbUNRUjZHIiwiQyI6IjAyOTQzYmI0MWY4MmY3MGE2MWIwMzM0ZGU1YjJjZjNmYzc0YmI2ZTlhZTY5OWVlMzc4YjYyMzc3ZTVhMWJiZmM5ZCIsImlkIjoibVI5UEozTXpqTDF5In0seyJhbW91bnQiOjE2LCJzZWNyZXQiOiJ2SFRHbGJoRXFBQUdEUVBteFBkczc1MFkiLCJDIjoiMDI4NDU0OGJkN2FiNjhmNTIyNzdkOTQxYTgwN2JmZjJlZWI4ZjNmY2EzYmVlODY2ODgxN2RjYTg3MGJhOGQxYWJkIiwiaWQiOiJtUjlQSjNNempMMXkifSx7ImFtb3VudCI6MzIsInNlY3JldCI6IldSajZCTXVQNTQyTFpmWXdiTldlbTJLaCIsIkMiOiIwMzc5NWE0NGUwNGY1YWU5MGYyZGIwZTkzYzc3MzJkMDJkYTQ0ZGIxZmRkMWYzNDlkN2EwMzJmN2U5OGZkYzZjYzQiLCJpZCI6Im1SOVBKM016akwxeSJ9XX1dfQVT
32 changes: 26 additions & 6 deletions moksha-core/src/token.rs
@@ -107,12 +107,16 @@ impl TokenV3 {
     }
 
     pub fn deserialize(data: impl Into<String>) -> Result<Self, MokshaCoreError> {
-        let json = general_purpose::URL_SAFE_NO_PAD.decode(
-            data.into()
-                .strip_prefix(TOKEN_PREFIX_V3)
-                .ok_or(MokshaCoreError::InvalidTokenPrefix)?
-                .as_bytes(),
-        )?;
+        let data = data.into();
+        let token = data
+            .strip_prefix(TOKEN_PREFIX_V3)
+            .ok_or(MokshaCoreError::InvalidTokenPrefix)?;
+
+        let json = general_purpose::URL_SAFE_NO_PAD
+            .decode(token.as_bytes())
+            .or_else(|_| general_purpose::URL_SAFE.decode(token.as_bytes()))
+            .map_err(|_| MokshaCoreError::InvalidToken)?;
+
         Ok(serde_json::from_slice::<Self>(&json)?)
     }
 
@@ -294,4 +298,20 @@ mod tests {
         assert_eq!(tokens.tokens.len(), 1);
         Ok(())
     }
+
+    #[test]
+    fn test_tokens_deserialize_with_padding() -> anyhow::Result<()> {
+        let input = read_fixture("token_60.cashu")?;
+        let tokens = TokenV3::deserialize(input)?;
+        assert_eq!(tokens.tokens.len(), 1);
+        Ok(())
+    }
+
+    #[test]
+    fn test_tokens_deserialize_invalid() -> anyhow::Result<()> {
+        let input = read_fixture("token_invalid.cashu")?;
+        let tokens = TokenV3::deserialize(input);
+        assert!(tokens.is_err());
+        Ok(())
+    }
 }
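
For context (not part of the commit), a minimal standalone sketch of the padding fallback, assuming the base64 0.21 general_purpose engines, where the no-pad engine rejects input that still carries `=` padding; that rejection is what broke deserialization of padded tokens. `decode_lenient` is an illustrative name, not from the codebase:

    use base64::{engine::general_purpose, Engine as _};

    // Accept a token body whether or not the sender kept the trailing `=`
    // padding, mirroring the fallback chain added to TokenV3::deserialize.
    fn decode_lenient(token: &str) -> Result<Vec<u8>, base64::DecodeError> {
        general_purpose::URL_SAFE_NO_PAD
            .decode(token.as_bytes())
            .or_else(|_| general_purpose::URL_SAFE.decode(token.as_bytes()))
    }

    fn main() {
        // 11 bytes, so the padded encoding really ends in `=`.
        let payload = b"cashu token";
        let padded = general_purpose::URL_SAFE.encode(payload);
        let unpadded = general_purpose::URL_SAFE_NO_PAD.encode(payload);

        // Both variants round-trip through the fallback.
        assert_eq!(decode_lenient(&padded).unwrap(), payload);
        assert_eq!(decode_lenient(&unpadded).unwrap(), payload);
    }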
