diff --git a/moksha-core/src/error.rs b/moksha-core/src/error.rs
index f01e57c4..1931f4cc 100644
--- a/moksha-core/src/error.rs
+++ b/moksha-core/src/error.rs
@@ -25,4 +25,7 @@ pub enum MokshaCoreError {
 
     #[error("Not enough tokens")]
     NotEnoughTokens,
+
+    #[error("Invalid token")]
+    InvalidToken,
 }
diff --git a/moksha-core/src/fixtures/token_invalid.cashu b/moksha-core/src/fixtures/token_invalid.cashu
new file mode 100644
index 00000000..b3bffbdf
--- /dev/null
+++ b/moksha-core/src/fixtures/token_invalid.cashu
@@ -0,0 +1 @@
+cashuAeyJ0b2tlbiI6W3sibWludCI6Imh0dHA6Ly8xMjcuMC4wLjE6MzMzOCIsInByb29mcyI6W3siYW1vdW50Ijo0LCJzZWNyZXQiOiJzR3Z3OVZwalpqNGQ0YnFFU3FvQzdwTWEiLCJDIjoiMDM3YmQ2MGY2YWE1ZTE5ZjZhOWVjMzU5MjlkOGViN2E2Yzk1Y2YyOTM5NTlmMzMzNTQzYWQ5MWIxNTkyNWU2OTE1IiwiaWQiOiJtUjlQSjNNempMMXkifSx7ImFtb3VudCI6OCwic2VjcmV0IjoiQjJqNmw4Z1VUYjIxR0hqMFRnbUNRUjZHIiwiQyI6IjAyOTQzYmI0MWY4MmY3MGE2MWIwMzM0ZGU1YjJjZjNmYzc0YmI2ZTlhZTY5OWVlMzc4YjYyMzc3ZTVhMWJiZmM5ZCIsImlkIjoibVI5UEozTXpqTDF5In0seyJhbW91bnQiOjE2LCJzZWNyZXQiOiJ2SFRHbGJoRXFBQUdEUVBteFBkczc1MFkiLCJDIjoiMDI4NDU0OGJkN2FiNjhmNTIyNzdkOTQxYTgwN2JmZjJlZWI4ZjNmY2EzYmVlODY2ODgxN2RjYTg3MGJhOGQxYWJkIiwiaWQiOiJtUjlQSjNNempMMXkifSx7ImFtb3VudCI6MzIsInNlY3JldCI6IldSajZCTXVQNTQyTFpmWXdiTldlbTJLaCIsIkMiOiIwMzc5NWE0NGUwNGY1YWU5MGYyZGIwZTkzYzc3MzJkMDJkYTQ0ZGIxZmRkMWYzNDlkN2EwMzJmN2U5OGZkYzZjYzQiLCJpZCI6Im1SOVBKM016akwxeSJ9XX1dfQVT
\ No newline at end of file
diff --git a/moksha-core/src/token.rs b/moksha-core/src/token.rs
index 7d4eb4ff..91e26fad 100644
--- a/moksha-core/src/token.rs
+++ b/moksha-core/src/token.rs
@@ -107,12 +107,18 @@ impl TokenV3 {
     }
 
     pub fn deserialize(data: impl Into<String>) -> Result<Self, MokshaCoreError> {
-        let json = general_purpose::URL_SAFE_NO_PAD.decode(
-            data.into()
-                .strip_prefix(TOKEN_PREFIX_V3)
-                .ok_or(MokshaCoreError::InvalidTokenPrefix)?
-                .as_bytes(),
-        )?;
+        let data = data.into();
+        let token = data
+            .strip_prefix(TOKEN_PREFIX_V3)
+            .ok_or(MokshaCoreError::InvalidTokenPrefix)?;
+
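+        // Decode as unpadded URL-safE base64 first, falling back to the padded
+        // alphabet so tokens that were serialized with padding still parse.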
+        let json = general_purpose::URL_SAFE_NO_PAD
+            .decode(token.as_bytes())
+            .or_else(|_| general_purpose::URL_SAFE.decode(token.as_bytes()))
+            .map_err(|_| MokshaCoreError::InvalidToken)?;
+
         Ok(serde_json::from_slice::<Self>(&json)?)
     }
 
@@ -294,4 +298,20 @@ mod tests {
         assert_eq!(tokens.tokens.len(), 1);
         Ok(())
     }
+
+    #[test]
+    fn test_tokens_deserialize_with_padding() -> anyhow::Result<()> {
+        let input = read_fixture("token_60.cashu")?;
+        let tokens = TokenV3::deserialize(input)?;
+        assert_eq!(tokens.tokens.len(), 1);
+        Ok(())
+    }
+
+    #[test]
+    fn test_tokens_deserialize_invalid() -> anyhow::Result<()> {
+        let input = read_fixture("token_invalid.cashu")?;
+        let tokens = TokenV3::deserialize(input);
+        assert!(tokens.is_err());
+        Ok(())
+    }
 }