Commit 0f93e5b

update simple name optimization

Carbon225 authored and djstrong committed Nov 29, 2023
1 parent a993008 commit 0f93e5b

Showing 3 changed files with 26 additions and 13 deletions.
19 changes: 16 additions & 3 deletions ens_normalize/normalization.py
@@ -875,7 +875,7 @@ def tokens2beautified(tokens: List[Token], label_is_greek: List[bool]) -> str:
     return ''.join(s)


-SIMPLE_NAME_REGEX = re.compile(r'^[a-z0-9]+$')
+SIMPLE_NAME_REGEX = re.compile(r'^[a-z0-9]+(?:\.[a-z0-9]+)*$')


 def ens_process(input: str,
@@ -905,16 +905,29 @@ def ens_process(input: str,
     - `normalizations`: list of `NormalizableSequence` objects or `None` if `do_normalizations` is `False`
     """
     if SIMPLE_NAME_REGEX.match(input) is not None:
+        if do_tokenize:
+            tokens = []
+            current_cps = []
+            for c in input:
+                if ord(c) == CP_STOP:
+                    tokens.append(TokenValid(cps=current_cps))
+                    tokens.append(TokenStop())
+                    current_cps = []
+                else:
+                    current_cps.append(ord(c))
+            tokens.append(TokenValid(cps=current_cps))
+        else:
+            tokens = None
         return ENSProcessResult(
             normalized=input if do_normalize else None,
             beautified=input if do_beautify else None,
-            tokens=[TokenValid(cps=[ord(c) for c in input])] if do_tokenize else None,
+            tokens=tokens,
             cured=input if do_cure else None,
             cures=[] if do_cure else None,
             error=None,
             normalizations=[] if do_normalizations else None,
         )

     tokens: List[Token] = []
     error = None

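
For readers skimming the change, here is a minimal standalone sketch of what the new fast path does, re-implementing the regex and the dot-splitting loop outside the library. The tuple-based token representation and the CP_STOP value (ord('.')) are assumptions for illustration only; the library itself uses the TokenValid/TokenStop objects shown in the diff above.

import re

# New fast-path pattern: one or more lowercase-ASCII/digit labels separated by dots.
SIMPLE_NAME_REGEX = re.compile(r'^[a-z0-9]+(?:\.[a-z0-9]+)*$')

# Assumption for this sketch: CP_STOP is the code point of the label separator '.'.
CP_STOP = ord('.')


def simple_tokenize(name: str):
    # Split a simple name the way the new fast path does:
    # emit a "valid" token per label and a "stop" token per dot.
    tokens = []
    current_cps = []
    for c in name:
        if ord(c) == CP_STOP:
            tokens.append(('valid', current_cps))
            tokens.append(('stop', [CP_STOP]))
            current_cps = []
        else:
            current_cps.append(ord(c))
    tokens.append(('valid', current_cps))
    return tokens


# The old pattern r'^[a-z0-9]+$' rejected multi-label names, so 'abc123.eth'
# used to miss the fast path; the new pattern accepts it.
assert re.match(r'^[a-z0-9]+$', 'abc123.eth') is None
assert SIMPLE_NAME_REGEX.match('abc123.eth') is not None

print(simple_tokenize('abc.eth'))
# [('valid', [97, 98, 99]), ('stop', [46]), ('valid', [101, 116, 104])]
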
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ens-normalize"
-version = "3.0.6"
+version = "3.0.7"
 description = "Ethereum Name Service (ENS) Name Normalizer"
 license = "MIT"
 authors = ["Jakub Karbowski <[email protected]>"]
18 changes: 9 additions & 9 deletions tests/test_normalization.py
@@ -427,7 +427,7 @@ def test_is_normalizable():

 def test_simple_name_optimization():
     r = ens_process(
-        'abc123',
+        'abc123.eth',
         do_normalize=False,
         do_beautify=False,
         do_tokenize=False,
@@ -443,14 +443,14 @@ def test_simple_name_optimization():
     assert r.normalizations is None

     r = ens_process(
-        'abc123',
+        'abc123.eth',
         do_normalize=True,
         do_beautify=False,
         do_tokenize=False,
         do_normalizations=False,
         do_cure=False,
     )
-    assert r.normalized == 'abc123'
+    assert r.normalized == 'abc123.eth'
     assert r.beautified is None
     assert r.tokens is None
     assert r.cured is None
@@ -459,23 +459,23 @@ def test_simple_name_optimization():
     assert r.normalizations is None

     r = ens_process(
-        'abc123',
+        'abc123.eth',
         do_normalize=False,
         do_beautify=True,
         do_tokenize=False,
         do_normalizations=False,
         do_cure=False,
     )
     assert r.normalized is None
-    assert r.beautified == 'abc123'
+    assert r.beautified == 'abc123.eth'
     assert r.tokens is None
     assert r.cured is None
     assert r.cures is None
     assert r.error is None
     assert r.normalizations is None

     r = ens_process(
-        'abc123',
+        'abc123.eth',
         do_normalize=False,
         do_beautify=False,
         do_tokenize=True,
@@ -491,7 +491,7 @@ def test_simple_name_optimization():
     assert r.normalizations is None

     r = ens_process(
-        'abc123',
+        'abc123.eth',
         do_normalize=False,
         do_beautify=False,
         do_tokenize=False,
@@ -509,7 +509,7 @@ def test_simple_name_optimization():
     assert len(r.normalizations) == 0

     r = ens_process(
-        'abc123',
+        'abc123.eth',
         do_normalize=False,
         do_beautify=False,
         do_tokenize=False,
@@ -519,7 +519,7 @@ def test_simple_name_optimization():
     assert r.normalized is None
     assert r.beautified is None
     assert r.tokens is None
-    assert r.cured == 'abc123'
+    assert r.cured == 'abc123.eth'
     assert r.cures is not None
     assert len(r.cures) == 0
     assert r.error is None
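
A usage sketch of what the updated test now exercises. This assumes ens_process is importable from the top-level ens_normalize package (it may instead live in ens_normalize.normalization) and that the do_* flags not passed here default to off; only the token attributes shown in the diff are relied on.

from ens_normalize import ens_process  # assumed import path

# 'abc123.eth' now matches SIMPLE_NAME_REGEX, so the fast path handles
# tokenization without running the full normalization machinery.
r = ens_process('abc123.eth', do_tokenize=True)

# Expected fast-path shape: valid / stop / valid tokens, no error.
assert [type(t).__name__ for t in r.tokens] == ['TokenValid', 'TokenStop', 'TokenValid']
assert r.tokens[0].cps == [ord(c) for c in 'abc123']
assert r.tokens[2].cps == [ord(c) for c in 'eth']
assert r.error is None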
