Skip to content

Commit

Permalink
Put n at end of SparseTernary
Browse files Browse the repository at this point in the history
Also: add SparseBinary, Binary, Ternary as noise distributions :).

Note: this is a breaking change.
	  You have to change the way you construct ND.SparseTernary objects!
	  Benefit: you don't have to specify the dimension `n` anymore when putting it in an LWEParameters/NTRUParameters object :).
  • Loading branch information
ludopulles committed Sep 20, 2024
1 parent 390acd5 commit 36fa7d9
Show file tree
Hide file tree
Showing 10 changed files with 104 additions and 73 deletions.
2 changes: 1 addition & 1 deletion docs/algorithms/lwe-bkw.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Coded-BKW for LWE
We construct an example LWE instance::

from estimator import *
params = LWE.Parameters(n=400, q=7981, Xs=ND.SparseTernary(384, 16), Xe=ND.CenteredBinomial(4), m=800)
params = LWE.Parameters(n=400, q=7981, Xs=ND.SparseTernary(16), Xe=ND.CenteredBinomial(4), m=800)
params

and estimate the cost of Coded-BKW [C:GuoJohSta15]_, [C:KirFou15]_::
Expand Down
2 changes: 1 addition & 1 deletion docs/algorithms/lwe-dual.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ We construct an (easy) example LWE instance::

from estimator import *
from estimator.lwe_dual import dual_hybrid, matzov
params = LWE.Parameters(n=200, q=7981, Xs=ND.SparseTernary(384, 16), Xe=ND.CenteredBinomial(4))
params = LWE.Parameters(n=200, q=7981, Xs=ND.SparseTernary(16), Xe=ND.CenteredBinomial(4))
params

The simplest (and quickest to estimate) algorithm is the "plain" dual attack as described in [PQCBook:MicReg09]_::
Expand Down
2 changes: 1 addition & 1 deletion docs/algorithms/lwe-primal.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ LWE Primal Attacks
We construct an (easy) example LWE instance::

from estimator import *
params = LWE.Parameters(n=200, q=7981, Xs=ND.SparseTernary(384, 16), Xe=ND.CenteredBinomial(4))
params = LWE.Parameters(n=200, q=7981, Xs=ND.SparseTernary(16), Xe=ND.CenteredBinomial(4))
params

The simplest (and quickest to estimate) model is solving via uSVP and assuming the Geometric Series
Expand Down
3 changes: 1 addition & 2 deletions estimator/gb.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,8 +213,7 @@ def __call__(
rop: ≈2^227.2, dreg: 54, mem: ≈2^227.2, t: 4, m: 1024, tag: arora-gb
>>> LWE.arora_gb(params.updated(Xs=ND.UniformMod(3), Xe=ND.CenteredBinomial(4), m=1024))
rop: ≈2^189.9, dreg: 39, mem: ≈2^189.9, t: 4, m: 1024, tag: arora-gb
>>> Xs, Xe =ND.SparseTernary(1024, 64, 0), ND.DiscreteGaussian(2**10)
>>> LWE.arora_gb(LWE.Parameters(n=1024, q=2**40, Xs=Xs, Xe=Xe))
>>> LWE.arora_gb(LWE.Parameters(n=1024, q=2**40, Xs=ND.SparseBinary(64), Xe=ND.DiscreteGaussian(2**10)))
rop: ≈2^inf, dreg: ≈2^inf, tag: arora-gb
.. [EPRINT:ACFP14] Martin R. Albrecht, Carlos Cid, Jean-Charles Faugère & Ludovic Perret. (2014).
Expand Down
4 changes: 2 additions & 2 deletions estimator/lwe_dual.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ def __call__(
>>> from estimator import *
>>> from estimator.lwe_dual import dual_hybrid
>>> params = LWE.Parameters(n=1024, q = 2**32, Xs=ND.Uniform(0,1), Xe=ND.DiscreteGaussian(3.0))
>>> params = LWE.Parameters(n=1024, q = 2**32, Xs=ND.Binary, Xe=ND.DiscreteGaussian(3.0))
>>> LWE.dual(params)
rop: ≈2^107.0, mem: ≈2^66.4, m: 970, β: 264, d: 1994, ↻: 1, tag: dual
>>> dual_hybrid(params)
Expand All @@ -373,7 +373,7 @@ def __call__(
>>> dual_hybrid(params, mitm_optimization="numerical")
rop: ≈2^129.0, m: 1145, k: 1, mem: ≈2^131.0, ↻: 1, β: 346, d: 2044, ζ: 125, tag: dual_mitm_hybrid
>>> params = params.updated(Xs=ND.SparseTernary(params.n, 32))
>>> params = params.updated(Xs=ND.SparseTernary(32))
>>> LWE.dual(params)
rop: ≈2^103.4, mem: ≈2^63.9, m: 904, β: 251, d: 1928, ↻: 1, tag: dual
>>> dual_hybrid(params)
Expand Down
14 changes: 7 additions & 7 deletions estimator/lwe_guess.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,12 +127,12 @@ def __call__(self, params, log_level=5, **kwds):
>>> from estimator import *
>>> from estimator.lwe_guess import guess_composition
>>> guess_composition(LWE.primal_usvp)(schemes.Kyber512.updated(Xs=ND.SparseTernary(512, 16)))
>>> guess_composition(LWE.primal_usvp)(schemes.Kyber512.updated(Xs=ND.SparseTernary(16)))
rop: ≈2^102.2, red: ≈2^102.2, δ: 1.008011, β: 132, d: 461, tag: usvp, ↻: ≈2^34.9, ζ: 252, |S|: 1, ...
Compare::
>>> LWE.primal_hybrid(schemes.Kyber512.updated(Xs=ND.SparseTernary(512, 16)))
>>> LWE.primal_hybrid(schemes.Kyber512.updated(Xs=ND.SparseTernary(16)))
rop: ≈2^85.8, red: ≈2^84.8, svp: ≈2^84.8, β: 105, η: 2, ζ: 366, |S|: ≈2^85.1, d: 315, prob: ≈2^-23.4, ↻:...
"""
Expand Down Expand Up @@ -161,10 +161,10 @@ def __call__(self, params: LWEParameters, success_probability=0.99, quantum: boo
>>> from estimator import *
>>> from estimator.lwe_guess import exhaustive_search
>>> params = LWE.Parameters(n=64, q=2**40, Xs=ND.UniformMod(2), Xe=ND.DiscreteGaussian(3.2))
>>> params = LWE.Parameters(n=64, q=2**40, Xs=ND.Binary, Xe=ND.DiscreteGaussian(3.2))
>>> exhaustive_search(params)
rop: ≈2^73.6, mem: ≈2^72.6, m: 397.198
>>> params = LWE.Parameters(n=1024, q=2**40, Xs=ND.SparseTernary(n=1024, p=32), Xe=ND.DiscreteGaussian(3.2))
>>> params = LWE.Parameters(n=1024, q=2**40, Xs=ND.SparseTernary(32), Xe=ND.DiscreteGaussian(3.2))
>>> exhaustive_search(params)
rop: ≈2^413.9, mem: ≈2^412.9, m: ≈2^11.1
Expand Down Expand Up @@ -336,12 +336,12 @@ def __call__(self, params: LWEParameters, success_probability=0.99, optimization
>>> from estimator import *
>>> from estimator.lwe_guess import mitm
>>> params = LWE.Parameters(n=64, q=2**40, Xs=ND.UniformMod(2), Xe=ND.DiscreteGaussian(3.2))
>>> params = LWE.Parameters(n=64, q=2**40, Xs=ND.Binary, Xe=ND.DiscreteGaussian(3.2))
>>> mitm(params)
rop: ≈2^37.0, mem: ≈2^37.2, m: 37, k: 32, ↻: 1
>>> mitm(params, optimization="numerical")
rop: ≈2^39.2, m: 36, k: 32, mem: ≈2^39.1, ↻: 1
>>> params = LWE.Parameters(n=1024, q=2**40, Xs=ND.SparseTernary(n=1024, p=32), Xe=ND.DiscreteGaussian(3.2))
>>> params = LWE.Parameters(n=1024, q=2**40, Xs=ND.SparseTernary(32), Xe=ND.DiscreteGaussian(3.2))
>>> mitm(params)
rop: ≈2^217.8, mem: ≈2^210.2, m: ≈2^15.5, k: 512, ↻: 226
>>> mitm(params, optimization="numerical")
Expand Down Expand Up @@ -398,7 +398,7 @@ def __call__(self, params: LWEParameters, success_probability=0.99):
>>> from estimator import *
>>> from estimator.lwe_guess import distinguish
>>> params = LWE.Parameters(n=0, q=2 ** 32, Xs=ND.UniformMod(2), Xe=ND.DiscreteGaussian(2 ** 32))
>>> params = LWE.Parameters(n=0, q=2 ** 32, Xs=ND.Binary, Xe=ND.DiscreteGaussian(2 ** 32))
>>> distinguish(params)
rop: ≈2^60.0, mem: ≈2^60.0, m: ≈2^60.0
Expand Down
9 changes: 5 additions & 4 deletions estimator/lwe_primal.py
Original file line number Diff line number Diff line change
Expand Up @@ -553,16 +553,17 @@ def __call__(
EXAMPLES::
>>> from estimator import *
>>> LWE.primal_hybrid(schemes.Kyber512.updated(Xs=ND.SparseTernary(512, 16)), mitm = False, babai = False)
>>> params = schemes.Kyber512.updated(Xs=ND.SparseTernary(16))
>>> LWE.primal_hybrid(params, mitm=False, babai=False)
rop: ≈2^91.5, red: ≈2^90.7, svp: ≈2^90.2, β: 178, η: 21, ζ: 256, |S|: ≈2^56.6, d: 531, prob: 0.003, ↻: 1...
>>> LWE.primal_hybrid(schemes.Kyber512.updated(Xs=ND.SparseTernary(512, 16)), mitm = False, babai = True)
>>> LWE.primal_hybrid(params, mitm=False, babai=True)
rop: ≈2^88.7, red: ≈2^88.0, svp: ≈2^87.2, β: 98, η: 2, ζ: 323, |S|: ≈2^39.7, d: 346, prob: ≈2^-28.4, ↻: ...
>>> LWE.primal_hybrid(schemes.Kyber512.updated(Xs=ND.SparseTernary(512, 16)), mitm = True, babai = False)
>>> LWE.primal_hybrid(params, mitm=True, babai=False)
rop: ≈2^74.1, red: ≈2^73.7, svp: ≈2^71.9, β: 104, η: 16, ζ: 320, |S|: ≈2^77.1, d: 359, prob: ≈2^-12.3, ↻...
>>> LWE.primal_hybrid(schemes.Kyber512.updated(Xs=ND.SparseTernary(512, 16)), mitm = True, babai = True)
>>> LWE.primal_hybrid(params, mitm=True, babai=True)
rop: ≈2^85.8, red: ≈2^84.8, svp: ≈2^84.8, β: 105, η: 2, ζ: 366, |S|: ≈2^85.1, d: 315, prob: ≈2^-23.4, ↻:...
TESTS:
Expand Down
80 changes: 57 additions & 23 deletions estimator/nd.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,21 @@ def sigmaf(stddev):
class NoiseDistribution:
"""
All noise distributions are instances of this class.
It is recommended to pick one of the following available implementations below:
- DiscreteGaussian
- DiscreteGaussianAlpha
- CenteredBinomial
- Uniform
- UniformMod
- SparseTernary
- SparseBinary
- Binary
- Ternary
NOTE:
Generally, to generate an LWE parameter you call one of the above for the secret and error,
**without** specifying the dimension `n` and `m` for secret/error respectively!
These are initialized, when constructing the LWEParameters object.
"""
stddev: float = 0
mean: float = 0
Expand Down Expand Up @@ -153,7 +168,7 @@ def __len__(self):
EXAMPLE::
>>> from estimator import *
>>> D = ND.SparseTernary(1024, p=128, m=128)
>>> D = ND.SparseTernary(128, 128, 1024)
>>> len(D)
1024
>>> int(round(len(D) * float(D.density)))
Expand Down Expand Up @@ -199,17 +214,6 @@ def support_size(self, fraction=1.0):
raise NotImplementedError("support_size")


"""
The follow noise distributions are implemented below:
- DiscreteGaussian
- DiscreteGaussianAlpha
- CenteredBinomial
- Uniform
- UniformMod
- SparseTernary
"""


class DiscreteGaussian(NoiseDistribution):
"""
A discrete Gaussian distribution with standard deviation ``stddev`` per component.
Expand Down Expand Up @@ -382,19 +386,25 @@ class SparseTernary(NoiseDistribution):
Distribution of vectors of length ``n`` with ``p`` entries of 1 and ``m`` entries of -1, rest 0.
EXAMPLE::
>>> from estimator import *
>>> ND.SparseTernary(100, p=10)
>>> ND.SparseTernary(10, n=100)
D(σ=0.45)
>>> ND.SparseTernary(100, p=10, m=10)
>>> ND.SparseTernary(10, 10, 100)
D(σ=0.45)
>>> ND.SparseTernary(100, p=10, m=8)
>>> ND.SparseTernary(10, 8, 100)
D(σ=0.42, μ=0.02)
>>> ND.SparseTernary(0, 0, 0).support_size()
1
"""
def __init__(self, n, p, m=None):
def __init__(self, p, m=None, n=None):
p, m = int(p), int(p if m is None else m)
self.p, self.m = p, m

# Yes, n=0 might happen when estimating the cost of the dual attack!
# Yes, n=0 might happen when estimating the cost of the dual attack! Support size is 1
if n is None:
# Treat it the same as n=0.
n = 0
mean = 0 if n == 0 else RR((p - m) / n)
density = 0 if n == 0 else RR((p + m) / n)
stddev = sqrt(density - mean**2)
Expand All @@ -403,7 +413,7 @@ def __init__(self, n, p, m=None):
stddev=stddev,
mean=mean,
density=density,
bounds=(-1, 1),
bounds=(0 if m == 0 else -1, 0 if p == 0 else 1),
n=n
)

Expand All @@ -412,7 +422,7 @@ def __hash__(self):
EXAMPLE::
>>> from estimator import *
>>> hash(ND.SparseTernary(128, 16)) == hash(("SparseTernary", 128, 16, 16))
>>> hash(ND.SparseTernary(16, n=128)) == hash(("SparseTernary", 128, 16, 16))
True
"""
return hash(("SparseTernary", self.n, self.p, self.m))
Expand All @@ -422,7 +432,7 @@ def resize(self, new_n):
Return an altered distribution having a dimension `new_n`.
Assumes `p` and `m` stay the same.
"""
return SparseTernary(new_n, self.p, self.m)
return SparseTernary(self.p, self.m, new_n)

def split_balanced(self, new_n, new_hw=None):
"""
Expand All @@ -441,8 +451,8 @@ def split_balanced(self, new_n, new_hw=None):
new_p = int((QQ(new_hw * self.p) / hw).round('down'))
new_m = new_hw - new_p
return (
SparseTernary(new_n, new_p, new_m),
SparseTernary(n - new_n, self.p - new_p, self.m - new_m)
SparseTernary(new_p, new_m, new_n),
SparseTernary(self.p - new_p, self.m - new_m, n - new_n)
)

def split_probability(self, new_n, new_hw=None):
Expand All @@ -465,8 +475,32 @@ def support_size(self, fraction=1.0):
EXAMPLE::
>>> from estimator import *
>>> ND.SparseTernary(64, 8).support_size()
>>> ND.SparseTernary(8, 8, 64).support_size()
6287341680214194176
"""
n, p, m = len(self), self.p, self.m
return ceil(binomial(n, p) * binomial(n - p, m) * RR(fraction))


def SparseBinary(hw, n=None):
"""
Sparse binary noise distribution having `hw` coefficients equal to 1, and the rest zero.
EXAMPLE::
>>> from estimator import *
>>> ND.SparseBinary(10).bounds
(0, 1)
"""
return SparseTernary(hw, 0, n)


"""
Binary noise uniform from {0, 1}^n
"""
Binary = Uniform(0, 1)

"""
Ternary noise uniform from {-1, 0, 1}^n
"""
Ternary = Uniform(-1, 1)
13 changes: 5 additions & 8 deletions estimator/ntru_primal.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,20 +363,17 @@ def __call__(
EXAMPLES::
>>> from estimator import *
>>> NTRU.primal_hybrid(schemes.NTRUHPS2048509Enc.updated(Xs=ND.SparseTernary(508,16)),
... mitm = False, babai = False)
>>> params = schemes.NTRUHPS2048509Enc.updated(Xs=ND.SparseTernary(16))
>>> NTRU.primal_hybrid(params, mitm=False, babai=False)
rop: ≈2^87.8, red: ≈2^87.0, svp: ≈2^86.6, β: 116, η: 21, ζ: 302, |S|: ≈2^39.2, d: 372, prob: ≈2^-22.3, ↻...
>>> NTRU.primal_hybrid(schemes.NTRUHPS2048509Enc.updated(Xs=ND.SparseTernary(508,16)),
... mitm = False, babai = True)
>>> NTRU.primal_hybrid(params, mitm=False, babai=True)
rop: ≈2^88.0, red: ≈2^87.4, svp: ≈2^86.4, β: 98, η: 2, ζ: 318, |S|: ≈2^39.6, d: 328, prob: ≈2^-27.9, ↻: ...
>>> NTRU.primal_hybrid(schemes.NTRUHPS2048509Enc.updated(Xs=ND.SparseTernary(508,16)),
... mitm = True, babai = False)
>>> NTRU.primal_hybrid(params, mitm=True, babai=False)
rop: ≈2^80.1, red: ≈2^79.7, svp: ≈2^78.3, β: 170, η: 22, ζ: 254, |S|: ≈2^103.7, d: 495, prob: 0.708, ↻: ...
>>> NTRU.primal_hybrid(schemes.NTRUHPS2048509Enc.updated(Xs=ND.SparseTernary(508,16)),
... mitm = True, babai = True)
>>> NTRU.primal_hybrid(params, mitm=True, babai=True)
rop: ≈2^85.1, red: ≈2^84.1, svp: ≈2^84.0, β: 105, η: 2, ζ: 363, |S|: ≈2^85.0, d: 294, prob: ≈2^-22.9, ↻:...
TESTS:
Expand Down
Loading

0 comments on commit 36fa7d9

Please sign in to comment.