
Commit

wip
callebtc committed Jul 9, 2024
1 parent 4ceb17c commit 2887d8d
Showing 5 changed files with 51 additions and 86 deletions.
44 changes: 32 additions & 12 deletions cashu/core/base.py
@@ -9,7 +9,7 @@

import cbor2
from loguru import logger
from pydantic import BaseModel, root_validator
from pydantic import BaseModel, PrivateAttr, root_validator

from cashu.core.json_rpc.base import JSONRPCSubscriptionKinds

@@ -812,35 +812,53 @@ class TokenV3(BaseModel, Token):
"""

token: List[TokenV3Token] = []
memo: Optional[str] = None
unit: str = "sat"
_memo: Optional[str] = PrivateAttr(None)
_unit: str = PrivateAttr("sat")

class Config:
allow_population_by_field_name = True

@property
def proofs(self):
def proofs(self) -> List[Proof]:
return [proof for token in self.token for proof in token.proofs]

@property
def amount(self):
def amount(self) -> int:
return sum([p.amount for p in self.proofs])

@property
def keysets(self):
def keysets(self) -> List[str]:
return list(set([p.id for p in self.proofs]))

@property
def mint(self):
def mint(self) -> str:
return self.mints[0]

@property
def mints(self):
def mints(self) -> List[str]:
return list(set([t.mint for t in self.token if t.mint]))

@property
def memo(self) -> Optional[str]:
return str(self._memo) if self._memo else None

@memo.setter
def memo(self, memo: Optional[str]):
self._memo = memo

@property
def unit(self) -> str:
return self._unit

@unit.setter
def unit(self, unit: str):
self._unit = unit

def serialize_to_dict(self, include_dleq=False):
return_dict = dict(token=[t.to_dict(include_dleq) for t in self.token])
if self.memo:
return_dict.update(dict(memo=self.memo)) # type: ignore
if self.unit:
return_dict.update(dict(unit=self.unit)) # type: ignore
return_dict.update(dict(unit=self.unit)) # type: ignore
return return_dict

@classmethod
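Note on the memo/unit hunk above: the public memo and unit fields become Pydantic private attributes (PrivateAttr) that are exposed through properties, so they are no longer validated model fields and never show up in .dict() or .json(), but can still be read like plain attributes. A minimal sketch of the pattern on a hypothetical Demo model (not the real TokenV3; how well assignment through a property setter works on a Pydantic v1 BaseModel depends on the Pydantic version, so the sketch assigns the private attribute directly):

    from typing import Optional

    from pydantic import BaseModel, PrivateAttr


    class Demo(BaseModel):
        # private attributes: excluded from __init__, validation, .dict() and .json()
        _memo: Optional[str] = PrivateAttr(None)
        _unit: str = PrivateAttr("sat")

        @property
        def memo(self) -> Optional[str]:
            return self._memo

        @property
        def unit(self) -> str:
            return self._unit


    d = Demo()
    d._memo = "thank you"  # private attributes can be assigned after construction
    print(d.memo, d.unit)  # -> thank you sat
    print(d.dict())        # -> {} (private state never leaks into serialization)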
@@ -867,7 +885,9 @@ def serialize(self, include_dleq=False) -> str:
tokenv3_serialized = prefix
# encode the token as a base64 string
tokenv3_serialized += base64.urlsafe_b64encode(
json.dumps(self.serialize_to_dict(include_dleq)).encode()
json.dumps(
self.serialize_to_dict(include_dleq), separators=(",", ":")
).encode()
).decode()
return tokenv3_serialized
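The serialize() change above passes separators=(",", ":") to json.dumps, which drops the spaces inserted by the default separators and therefore shortens the base64-encoded token. A standalone illustration of the effect (the payload dict is made up for the example):

    import base64
    import json

    payload = {"token": [{"mint": "http://localhost:3338", "proofs": []}], "unit": "sat"}

    default_json = json.dumps(payload)                          # '{"token": [{"mint": ...'
    compact_json = json.dumps(payload, separators=(",", ":"))   # '{"token":[{"mint":...'

    print(len(base64.urlsafe_b64encode(default_json.encode())))  # longer
    print(len(base64.urlsafe_b64encode(compact_json.encode())))  # shorter, same data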

@@ -1082,7 +1102,7 @@ def deserialize(cls, tokenv4_serialized: str) -> "TokenV4":
return cls.parse_obj(token)

def to_tokenv3(self) -> TokenV3:
tokenv3 = TokenV3()
tokenv3 = TokenV3(_memo=self.d, _unit=self.u)
for token in self.t:
tokenv3.token.append(
TokenV3Token(
2 changes: 1 addition & 1 deletion cashu/wallet/cli/cli.py
@@ -720,7 +720,7 @@ async def pending(ctx: Context, legacy, number: int, offset: int):
reserved_proofs = await get_reserved_proofs(wallet.db)
if len(reserved_proofs):
print("--------------------------\n")
sorted_proofs = sorted(reserved_proofs, key=itemgetter("send_id")) # type: ignore
sorted_proofs = sorted(reserved_proofs, key=itemgetter("send_id"), reverse=True) # type: ignore
if number:
number += offset
for i, (key, value) in islice(
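For context on the sorted(...) change above: reverse=True simply flips the order in which the send_id groups of pending tokens are printed. In isolation the call behaves like this (stand-in dict rows, not real proofs):

    from operator import itemgetter

    rows = [{"send_id": "a1"}, {"send_id": "c3"}, {"send_id": "b2"}]
    print(sorted(rows, key=itemgetter("send_id"), reverse=True))
    # -> [{'send_id': 'c3'}, {'send_id': 'b2'}, {'send_id': 'a1'}]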
2 changes: 1 addition & 1 deletion cashu/wallet/helpers.py
@@ -87,7 +87,7 @@ def deserialize_token_from_string(token: str) -> Token:
try:
return TokenV4.from_tokenv3(tokenV3Obj)
except ValueError as e:
logger.debug(f"Error converting TokenV3 to TokenV4: {e}")
logger.debug(f"Could not convert TokenV3 to TokenV4: {e}")
return tokenV3Obj
if token.startswith("cashuB"):
tokenObj = TokenV4.deserialize(token)
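Usage of the surrounding helper, roughly: deserialize_token_from_string parses a serialized token and, for V3 input, tries to upgrade it to TokenV4, falling back to the TokenV3 object when the conversion raises ValueError. A hypothetical call (token_str stands for any serialized token, e.g. one of the "cashuA..." strings from the tests below):

    from cashu.wallet.helpers import deserialize_token_from_string

    # token_str: any serialized token string
    token = deserialize_token_from_string(token_str)
    print(type(token).__name__)  # "TokenV4" if the upgrade succeeded, otherwise "TokenV3"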
16 changes: 10 additions & 6 deletions cashu/wallet/proofs.py
@@ -132,17 +132,21 @@ async def _make_tokenv3(
Returns:
TokenV3: TokenV3 object
"""
token = TokenV3(memo=memo)

# extract all keysets from proofs
keysets = self._get_proofs_keyset_ids(proofs)
# extract all keysets IDs from proofs
keyset_ids = self._get_proofs_keyset_ids(proofs)
keysets = {k.id: k for k in self.keysets.values() if k.id in keyset_ids}
assert (
set([k.unit for k in self.keysets.values()]) == 1
len(set([k.unit for k in keysets.values()])) == 1
), "All keysets must have the same unit"
token.unit = self.keysets[keysets[0]].unit.name
unit = keysets[list(keysets.keys())[0]].unit

token = TokenV3()
token.memo = memo
token.unit = unit.name
assert token.memo == memo, f"Memo not set correctly: {token.memo}"
# get all mint URLs for all unique keysets from db
mint_urls = await self._get_keyset_urls(keysets)
mint_urls = await self._get_keyset_urls(list(keysets.keys()))

# append all url-grouped proofs to token
for url, ids in mint_urls.items():
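The _make_tokenv3 change above first collects the keysets referenced by the proofs, asserts that they all share one unit, and only then builds the token and sets memo and unit through the new properties. The unit check in isolation looks roughly like this (KeysetStub and the second keyset id are made up for the sketch):

    from dataclasses import dataclass


    @dataclass
    class KeysetStub:
        id: str
        unit: str


    keysets = {
        k.id: k
        for k in [KeysetStub("00ad268c4d1f5826", "sat"), KeysetStub("00aabbccddeeff00", "sat")]
    }
    assert len(set(k.unit for k in keysets.values())) == 1, "All keysets must have the same unit"
    unit = keysets[list(keysets.keys())[0]].unit  # "sat"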
73 changes: 7 additions & 66 deletions tests/test_core.py
@@ -10,86 +10,27 @@ def test_get_output_split():


def test_tokenv3_deserialize_get_attributes():
token_str = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjVQRjFnNFpWMnci"
"LCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOGUyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwgeyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW"
"1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiMDIyZDAwNGY5ZWMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0"
"NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0OjMzMzgifV19"
)
token_str = "cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjAwYWQyNjhjNGQxZjU4MjYiLCAiYW1vdW50IjogOCwgInNlY3JldCI6ICJjNTA5YzM4MmM2NjJkYWJiYjRkMGM1ZjllYTI1NjAwZTNhYjViMTIzYWNlNmNiNzljYTM1OWE4NTQwOGZlY2I3IiwgIkMiOiAiMDMwZTNkNDdkM2NlMjNkZTkzNTM3MjQ1NGJjOTMxMTJjZmExN2VmYWNkYjZjNWM2NDNmODVjOGFmM2JlNWQwMWEwIn0sIHsiaWQiOiAiMDBhZDI2OGM0ZDFmNTgyNiIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjgxYjhiYjFhN2Q2MGQwZGZiMjkxNmZjZmU4NzUxZmRhZGJjZTU2NDZmMmEyYTQzY2FkMDY4YjUzNzJlN2M5NGMiLCAiQyI6ICIwMzUxN2E0OGYxMmU0NWQ0YzU4ZGUyMTZhNDNjYzgxNDMwMjMxY2YyYjA4OWQzMjY3MDlkMGYyZDAwYjc0N2VmYzcifV0sICJtaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6MzMzOCJ9XSwgInVuaXQiOiAic2F0In0="
token = TokenV3.deserialize(token_str)
assert token.get_amount() == 10
assert len(token.get_proofs()) == 2
assert token.amount == 10
assert len(token.proofs) == 2
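The new test vectors are compact, carry the keyset id 00ad268c4d1f5826, and include a top-level "unit" field. To inspect what such a string contains, you can strip the "cashuA" prefix and decode the urlsafe-base64 JSON body, roughly as follows (the helper name is made up for illustration):

    import base64
    import json


    def peek_tokenv3(token_str: str) -> dict:
        assert token_str.startswith("cashuA")
        body = token_str[len("cashuA"):]
        body += "=" * ((4 - len(body) % 4) % 4)  # restore padding if it was stripped
        return json.loads(base64.urlsafe_b64decode(body))


    # peek_tokenv3(token_str)["unit"] -> "sat"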


def test_tokenv3_deserialize_serialize():
token_str = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIkplaFpMVTZuQ3BSZCIsICJh"
"bW91bnQiOiAyLCAic2VjcmV0IjogIjBFN2lDazRkVmxSZjVQRjFnNFpWMnci"
"LCAiQyI6ICIwM2FiNTgwYWQ5NTc3OGVkNTI5NmY4YmVlNjU1ZGJkN2Q2NDJmNWQzMmRlOG"
"UyNDg0NzdlMGI0ZDZhYTg2M2ZjZDUifSwgeyJpZCI6ICJKZWhaTFU2bkNwUmQiLCAiYW"
"1vdW50IjogOCwgInNlY3JldCI6ICJzNklwZXh3SGNxcXVLZDZYbW9qTDJnIiwgIkMiOiAiM"
"DIyZDAwNGY5ZWMxNmE1OGFkOTAxNGMyNTliNmQ2MTRlZDM2ODgyOWYwMmMzODc3M2M0"
"NzIyMWY0OTYxY2UzZjIzIn1dLCAibWludCI6ICJodHRwOi8vbG9jYWxob3N0OjMzMzgifV19"
)
token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6ImVmNTRkOTg2NDQxNjA1MjY3YzZhNmU3MzJmZWZlMWRhNzViNWU5ZmY3MzZkODQxNmYwYmE4MmM4OTNlMWUyYWUiLCJDIjoiMDI2OTQ4YWFlY2FiZjJlZGVjYWU1M2YzYWIyMjNkZGFhMTRhNmY4MjJhZWNjZGMxYjAxNmVlODg0NDYwYjBjMTVjIn0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImZkMmZkZWI1NzI0N2QzZTJlNjg4YmEyZDI1OGYzN2U0NjY4ZjI2MGM1MGUzZDBjOWRkNWE5Njk0YjQ1ZmQ4OWMiLCJDIjoiMDNjNjc0NWQ5MjA1NDAzMTk3NzA1YWIyN2M1YzEzNDMwNjdmYmU1MTZhMTM2NDE4M2MzMTBmZDY5MmZkNGQzM2ZjIn1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
token = TokenV3.deserialize(token_str)
assert token.serialize() == token_str


def test_tokenv3_deserialize_serialize_with_dleq():
token_str = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93M"
"SIsICJhbW91bnQiOiAyLCAic2VjcmV0IjogIjZmZjFiY2VlOGUzMzk2NGE4ZDNjNGQ5NzYwNzdiZ"
"DI4ZGVkZWJkODYyMDU0MDQzNDY4ZjU5ZDFiZjI1OTQzN2QiLCAiQyI6ICIwM2I3ZD"
"lkMzIzYTAxOWJlNTE4NzRlOGE5OGY1NDViOTg3Y2JmNmU5MWUwMDc1YTFhZjQ3MjY2NDMxOGRlZ"
"TQzZTUiLCAiZGxlcSI6IHsiZSI6ICI1ZjkxMGQ4NTc0M2U0OTI0ZjRiNjlkNzhjM"
"jFjYTc1ZjEzNzg3Zjc3OTE1NWRmMjMzMjJmYTA1YjU5ODdhYzNmIiwgInMiOiAiZTc4Y2U0MzNiZ"
"WNlZTNjNGU1NzM4ZDdjMzRlNDQyZWQ0MmJkMzk0MjI0ZTc3MjE4OGFjMmI5MzZmM"
"jA2Y2QxYSIsICJyIjogIjI3MzM3ODNmOTQ4MWZlYzAxNzdlYmM4ZjBhOTI2OWVjOGFkNzU5MDU2ZT"
"k3MTRiMWEwYTEwMDQ3MmY2Y2Y5YzIifX0sIHsiaWQiOiAiMWNDTklBWjJYL3cxIi"
"wgImFtb3VudCI6IDgsICJzZWNyZXQiOiAiMmFkNDMyZDRkNTg2MzJiMmRlMzI0ZmQxYmE5OTcyZmE"
"4MDljNmU3ZGE1ZTkyZWVmYjBiNjYxMmQ5M2Q3ZTAwMCIsICJDIjogIjAzMmFmYjg"
"zOWQwMmRmMWNhOGY5ZGZjNTI1NzUxN2Q0MzY4YjdiMTc0MzgzM2JlYWUzZDQzNmExYmQwYmJkYjVk"
"OCIsICJkbGVxIjogeyJlIjogImY0NjM2MzU5YTUzZGQxNGEyNmUyNTMyMDQxZWIx"
"MDE2OTk1ZTg4NzgwODY0OWFlY2VlNTcwZTA5ZTk2NTU3YzIiLCAicyI6ICJmZWYzMGIzMDcwMDJkMW"
"VjNWZiZjg0ZGZhZmRkMGEwOTdkNDJlMDYxNTZiNzdiMTMzMmNjNGZjNGNjYWEyOD"
"JmIiwgInIiOiAiODQ5MjQxNzBlYzc3ZjhjMDNmZDRlZTkyZTA3MjdlMzYyNTliZjRhYTc4NTBjZTc2"
"NDExMDQ0MmNlNmVlM2FjYyJ9fV0sICJtaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6MzMzOCJ9XX0="
)
token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjI4MDliZjk5YTgzOGJkNzU1NjAwNTVmMjFlNzZiNzYwOTEwMGE1M2FhMzJiNDUwMDRiOGEzMzU0NDgwN2Q4N2MiLCJDIjoiMDJhZWFmNmE5OGY1MjQzZGY4YTA0OGIzNzYzMjUzNjYxZTgxMjFkODhlZTIzNGZmM2ZmYjQ2ZWM0YWIyOWIyYmJlIiwiZGxlcSI6eyJlIjoiNThlOGYzYTVkOGE2M2M1NGJkMjM5YzE4ZWJkMWUxZWFiZmJkZWMyMzhkNDBjZWExOGJjOWJmY2M0NjIyNGRjYyIsInMiOiIwNGYwM2FkMTA3MTE4NGQzZWIyOTNlYjRhMWI3MGY1OTQ0Mjg1NmJhYzNmNWJjZDE2OWJkMmVhOGVkNmY0NjlhIiwiciI6IjExZDUwMjRhM2U3N2Q0MzNhN2VjMTgwOGE5NzgzNGY2MzlhYjVkYjZhNjZhNmQzYWZlM2M4NGUyNmEzZWM3MDcifX0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImY1OGNhNmMwYTA3YWI2YjFmMWUzYjIzYWU3MDc3ODE5MzNiMGExNTExYWYzZWQyMjRmY2FjNzgxM2NhZTQ5OTYiLCJDIjoiMDNiZDVhMWFmN2NkMTY0MjA2MmU5NjRlYTZjOTlhZGRiNWI1YjRhYmY5Y2VmMjhjZWRhYmJhNGFlM2QyYmUyOGRmIiwiZGxlcSI6eyJlIjoiMmQzNTc2YzU3ZTM5ZjFiMzAzN2RmYjdhYmViOTE2M2I2ZGIxMjExMTBjNTZiY2NkYzhmMTcyN2MzZTg4NjQyNyIsInMiOiIzNDg2M2MxNDU5ZmI5MTk4ZjNhNjAyYzZhMWRkYmExNzc0NWUzN2M2ZGNiMjNiMmQxMmU3NGM3YzE3MjZiOWYwIiwiciI6IjYxNzRlNjBiZjU4MGYyOTBiNTIwZjMxYzc5MjBlNTA3MDkxNmJmYzRmZTc0MDNhNjY5ZThlNmEzOThhNGQ3YTcifX1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
token = TokenV3.deserialize(token_str)
assert token.serialize(include_dleq=True) == token_str


def test_tokenv3_deserialize_serialize_no_dleq():
token_str = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93MSIsICJhb"
"W91bnQiOiAyLCAic2VjcmV0IjogIjZmZjFiY2VlOGUzMzk2NGE4ZDNjNGQ5NzYwNzdiZ"
"DI4ZGVkZWJkODYyMDU0MDQzNDY4ZjU5ZDFiZjI1OTQzN2QiLCAiQyI6ICIwM2I3ZDlkMzIzY"
"TAxOWJlNTE4NzRlOGE5OGY1NDViOTg3Y2JmNmU5MWUwMDc1YTFhZjQ3MjY2NDMxOGRlZ"
"TQzZTUiLCAiZGxlcSI6IHsiZSI6ICI1ZjkxMGQ4NTc0M2U0OTI0ZjRiNjlkNzhjMjFjYTc1Z"
"jEzNzg3Zjc3OTE1NWRmMjMzMjJmYTA1YjU5ODdhYzNmIiwgInMiOiAiZTc4Y2U0MzNiZ"
"WNlZTNjNGU1NzM4ZDdjMzRlNDQyZWQ0MmJkMzk0MjI0ZTc3MjE4OGFjMmI5MzZmMjA2Y2QxY"
"SIsICJyIjogIjI3MzM3ODNmOTQ4MWZlYzAxNzdlYmM4ZjBhOTI2OWVjOGFkNzU5MDU2ZT"
"k3MTRiMWEwYTEwMDQ3MmY2Y2Y5YzIifX0sIHsiaWQiOiAiMWNDTklBWjJYL3cxIiwgImFtb3"
"VudCI6IDgsICJzZWNyZXQiOiAiMmFkNDMyZDRkNTg2MzJiMmRlMzI0ZmQxYmE5OTcyZmE"
"4MDljNmU3ZGE1ZTkyZWVmYjBiNjYxMmQ5M2Q3ZTAwMCIsICJDIjogIjAzMmFmYjgzOWQwMmR"
"mMWNhOGY5ZGZjNTI1NzUxN2Q0MzY4YjdiMTc0MzgzM2JlYWUzZDQzNmExYmQwYmJkYjVk"
"OCIsICJkbGVxIjogeyJlIjogImY0NjM2MzU5YTUzZGQxNGEyNmUyNTMyMDQxZWIxMDE2OTk1"
"ZTg4NzgwODY0OWFlY2VlNTcwZTA5ZTk2NTU3YzIiLCAicyI6ICJmZWYzMGIzMDcwMDJkMW"
"VjNWZiZjg0ZGZhZmRkMGEwOTdkNDJlMDYxNTZiNzdiMTMzMmNjNGZjNGNjYWEyODJmIiwgIn"
"IiOiAiODQ5MjQxNzBlYzc3ZjhjMDNmZDRlZTkyZTA3MjdlMzYyNTliZjRhYTc4NTBjZTc2"
"NDExMDQ0MmNlNmVlM2FjYyJ9fV0sICJtaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6MzMzOCJ9XX0="
)
token_str_no_dleq = (
"cashuAeyJ0b2tlbiI6IFt7InByb29mcyI6IFt7ImlkIjogIjFjQ05JQVoyWC93MSIsICJhbW91bn"
"QiOiAyLCAic2VjcmV0IjogIjZmZjFiY2VlOGUzMzk2NGE4ZDNjNGQ5NzYwNzdiZDI4"
"ZGVkZWJkODYyMDU0MDQzNDY4ZjU5ZDFiZjI1OTQzN2QiLCAiQyI6ICIwM2I3ZDlkMzIzYTAxOWJlN"
"TE4NzRlOGE5OGY1NDViOTg3Y2JmNmU5MWUwMDc1YTFhZjQ3MjY2NDMxOGRlZTQzZTU"
"ifSwgeyJpZCI6ICIxY0NOSUFaMlgvdzEiLCAiYW1vdW50IjogOCwgInNlY3JldCI6ICIyYWQ0MzJkN"
"GQ1ODYzMmIyZGUzMjRmZDFiYTk5NzJmYTgwOWM2ZTdkYTVlOTJlZWZiMGI2NjEyZD"
"kzZDdlMDAwIiwgIkMiOiAiMDMyYWZiODM5ZDAyZGYxY2E4ZjlkZmM1MjU3NTE3ZDQzNjhiN2IxNzQz"
"ODMzYmVhZTNkNDM2YTFiZDBiYmRiNWQ4In1dLCAibWludCI6ICJodHRwOi8vbG9jY"
"Wxob3N0OjMzMzgifV19"
)
token_str = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjI4MDliZjk5YTgzOGJkNzU1NjAwNTVmMjFlNzZiNzYwOTEwMGE1M2FhMzJiNDUwMDRiOGEzMzU0NDgwN2Q4N2MiLCJDIjoiMDJhZWFmNmE5OGY1MjQzZGY4YTA0OGIzNzYzMjUzNjYxZTgxMjFkODhlZTIzNGZmM2ZmYjQ2ZWM0YWIyOWIyYmJlIiwiZGxlcSI6eyJlIjoiNThlOGYzYTVkOGE2M2M1NGJkMjM5YzE4ZWJkMWUxZWFiZmJkZWMyMzhkNDBjZWExOGJjOWJmY2M0NjIyNGRjYyIsInMiOiIwNGYwM2FkMTA3MTE4NGQzZWIyOTNlYjRhMWI3MGY1OTQ0Mjg1NmJhYzNmNWJjZDE2OWJkMmVhOGVkNmY0NjlhIiwiciI6IjExZDUwMjRhM2U3N2Q0MzNhN2VjMTgwOGE5NzgzNGY2MzlhYjVkYjZhNjZhNmQzYWZlM2M4NGUyNmEzZWM3MDcifX0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImY1OGNhNmMwYTA3YWI2YjFmMWUzYjIzYWU3MDc3ODE5MzNiMGExNTExYWYzZWQyMjRmY2FjNzgxM2NhZTQ5OTYiLCJDIjoiMDNiZDVhMWFmN2NkMTY0MjA2MmU5NjRlYTZjOTlhZGRiNWI1YjRhYmY5Y2VmMjhjZWRhYmJhNGFlM2QyYmUyOGRmIiwiZGxlcSI6eyJlIjoiMmQzNTc2YzU3ZTM5ZjFiMzAzN2RmYjdhYmViOTE2M2I2ZGIxMjExMTBjNTZiY2NkYzhmMTcyN2MzZTg4NjQyNyIsInMiOiIzNDg2M2MxNDU5ZmI5MTk4ZjNhNjAyYzZhMWRkYmExNzc0NWUzN2M2ZGNiMjNiMmQxMmU3NGM3YzE3MjZiOWYwIiwiciI6IjYxNzRlNjBiZjU4MGYyOTBiNTIwZjMxYzc5MjBlNTA3MDkxNmJmYzRmZTc0MDNhNjY5ZThlNmEzOThhNGQ3YTcifX1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
token_str_no_dleq = "cashuAeyJ0b2tlbiI6W3sicHJvb2ZzIjpbeyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjgsInNlY3JldCI6IjI4MDliZjk5YTgzOGJkNzU1NjAwNTVmMjFlNzZiNzYwOTEwMGE1M2FhMzJiNDUwMDRiOGEzMzU0NDgwN2Q4N2MiLCJDIjoiMDJhZWFmNmE5OGY1MjQzZGY4YTA0OGIzNzYzMjUzNjYxZTgxMjFkODhlZTIzNGZmM2ZmYjQ2ZWM0YWIyOWIyYmJlIn0seyJpZCI6IjAwYWQyNjhjNGQxZjU4MjYiLCJhbW91bnQiOjIsInNlY3JldCI6ImY1OGNhNmMwYTA3YWI2YjFmMWUzYjIzYWU3MDc3ODE5MzNiMGExNTExYWYzZWQyMjRmY2FjNzgxM2NhZTQ5OTYiLCJDIjoiMDNiZDVhMWFmN2NkMTY0MjA2MmU5NjRlYTZjOTlhZGRiNWI1YjRhYmY5Y2VmMjhjZWRhYmJhNGFlM2QyYmUyOGRmIn1dLCJtaW50IjoiaHR0cDovL2xvY2FsaG9zdDozMzM4In1dLCJ1bml0Ijoic2F0In0="
token = TokenV3.deserialize(token_str)
assert token.serialize(include_dleq=False) == token_str_no_dleq

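Taken together, the updated tests pin the exact serialized form: with compact JSON and the always-present unit field, deserializing one of the new vectors and serializing it again reproduces the input string. A short sketch of that invariant (token_str stands for the DLEQ test vector above):

    from cashu.core.base import TokenV3

    # token_str: the "cashuA..." test vector with DLEQ proofs
    token = TokenV3.deserialize(token_str)
    assert token.serialize(include_dleq=True) == token_str  # byte-for-byte roundtrip
    shorter = token.serialize(include_dleq=False)           # same proofs, DLEQ data omitted
    assert len(shorter) < len(token_str)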
