Skip to content

Commit

Permalink
Don't call SparseTernary with half-integers!
Browse files Browse the repository at this point in the history
And, while we are at it, add a function that splits a sparse ternary distribution into two, while preserving its hamming weight.
  • Loading branch information
ludopulles committed Sep 19, 2024
1 parent b689940 commit b5781ba
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 9 deletions.
15 changes: 6 additions & 9 deletions estimator/lwe_dual.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,15 +60,12 @@ def dual_reduce(
)

# Compute new secret distribution
if params.Xs.is_sparse:
if type(params.Xs) is SparseTernary:
h = params.Xs.hamming_weight
if not 0 <= h1 <= h:
raise OutOfBoundsError(f"Splitting weight {h1} must be between 0 and h={h}.")
# assuming the non-zero entries are uniform
p = h1 / 2
red_Xs = SparseTernary(params.n - zeta, h / 2 - p)
slv_Xs = SparseTernary(zeta, p)

# split the +1 and -1 entries in a balanced way.
slv_Xs, red_Xs = params.Xs.split_balanced(zeta, h1)
if h1 == h:
# no reason to do lattice reduction if we assume
# that the hw on the reduction part is 0
Expand Down Expand Up @@ -176,7 +173,7 @@ def cost(
Logging.log("dual", log_level, f"{repr(cost)}")

rep = 1
if params.Xs.is_sparse:
if type(params.Xs) is SparseTernary:
h = params.Xs.hamming_weight
probability = RR(prob_drop(params.n, h, zeta, h1))
rep = prob_amplify(success_probability, probability)
Expand Down Expand Up @@ -313,7 +310,7 @@ def f(beta):
beta = cost["beta"]

cost["zeta"] = zeta
if params.Xs.is_sparse:
if type(params.Xs) is SparseTernary:
cost["h1"] = h1
return cost

Expand Down Expand Up @@ -428,7 +425,7 @@ def __call__(

params = params.normalize()

if params.Xs.is_sparse:
if type(params.Xs) is SparseTernary:
Cost.register_impermanent(h1=False)

def _optimize_blocksize(
Expand Down
21 changes: 21 additions & 0 deletions estimator/nd.py
Original file line number Diff line number Diff line change
Expand Up @@ -423,6 +423,27 @@ def resize(self, new_n):
"""
return SparseTernary(new_n, self.p, self.m)

def split_balanced(self, new_n, new_hw=None):
    """
    Split the +1 and -1 entries in a balanced way, and return two SparseTernary
    distributions: one of dimension `new_n` and one of dimension `n - new_n`.

    :param new_n: dimension of the first distribution.
    :param new_hw: hamming weight of the first distribution. If ``None``, the
        most likely weight (i.e. the one keeping the same density as the parent
        distribution) is used.
    :return: tuple ``(SparseTernary(new_n, ...), SparseTernary(n - new_n, ...))``
        whose +1/-1 counts sum to those of the parent distribution.
    """
    n, hw = len(self), self.hamming_weight
    if hw == 0:
        # Degenerate all-zero distribution: the proportional split below would
        # divide by zero, but the answer is trivially two all-zero parts.
        return SparseTernary(new_n, 0, 0), SparseTernary(n - new_n, 0, 0)
    if new_hw is None:
        # Most likely split has the same density: new_hw / new_n = hw / n.
        new_hw = int(round(hw * new_n / n))

    # Apportion the +1 entries proportionally; the remaining new_hw entries
    # of the first part are -1s. The second part gets whatever is left over,
    # so the total +1 and -1 counts are preserved.
    new_p = int(round((new_hw * self.p) / hw))
    new_m = new_hw - new_p
    return (
        SparseTernary(new_n, new_p, new_m),
        SparseTernary(n - new_n, self.p - new_p, self.m - new_m),
    )

@property
def hamming_weight(self):
    """Number of non-zero (i.e. ±1) coefficients: #(+1) plus #(-1)."""
    return self.m + self.p
Expand Down

0 comments on commit b5781ba

Please sign in to comment.