add cubic response function
samuelstanton committed Jul 10, 2024
1 parent 4d94731 commit 8638baa
Showing 2 changed files with 21 additions and 11 deletions.
config/hydra/test_function/ehrlich.yaml (1 addition, 0 deletions)
@@ -4,6 +4,7 @@ dim: 256
 num_motifs: 4
 motif_length: 8
 quantization: ${test_function.motif_length}
+epistasis_factor: 0.0
 noise_std: 0.0
 negate: true
 random_seed: 0 # only change this if you want to change the actual test function instance
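
The new key maps directly onto the `epistasis_factor` constructor argument added in the Python diff below. As a minimal sketch (hypothetical usage: it assumes the package is installed and imports from the private module path shown in this commit, though a public re-export may exist instead), the config above corresponds to:

    from holo.test_functions.closed_form._ehrlich import Ehrlich

    # Values taken from ehrlich.yaml; quantization resolves to motif_length (8)
    # via the ${test_function.motif_length} interpolation.
    f = Ehrlich(
        dim=256,
        num_motifs=4,
        motif_length=8,
        quantization=8,
        epistasis_factor=0.0,  # new in this commit; 0.0 reproduces the old objective
        noise_std=0.0,
        negate=True,
        random_seed=0,
    )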
holo/test_functions/closed_form/_ehrlich.py (20 additions, 11 deletions)
@@ -21,6 +21,7 @@ def __init__(
         num_motifs: int = 1,
         motif_length: int = 3,
         quantization: int | None = None,
+        epistasis_factor: float = 0.0,
         noise_std: float = 0.0,
         negate: bool = False,
         random_seed: int = 0,
@@ -29,13 +30,15 @@ def __init__(
         self.num_states = num_states
         self.dim = dim
         self._random_seed = random_seed
+        self._motif_length = motif_length
         self._quantization = quantization
         super(Ehrlich, self).__init__(
             noise_std=noise_std,
             negate=negate,
             bounds=bounds,
         )
         self._generator = torch.Generator().manual_seed(random_seed)
+        self._epistasis_factor = epistasis_factor
         self.initial_dist = torch.ones(num_states) / num_states
         bandwidth = int(num_states * 0.4)
         self.transition_matrix = sample_sparse_ergodic_transition_matrix(
@@ -72,25 +75,26 @@ def __init__(
         self.spacings.append(spacing)
 
     def evaluate_true(self, X: torch.Tensor) -> torch.Tensor:
-        motif_present = []
+        motif_contrib = []
         for motif, spacing in zip(self.motifs, self.spacings):
-            motif_present.append(
-                motif_search(
-                    solution=X,
-                    motif=motif,
-                    spacing=spacing,
-                    mode="present",
-                    quantization=self._quantization,
-                )
+            motif_present = motif_search(
+                solution=X,
+                motif=motif,
+                spacing=spacing,
+                mode="present",
+                quantization=self._quantization,
             )
-        all_motifs_present = torch.stack(motif_present).prod(dim=0)
+            response = _cubic_response(motif_present, self._epistasis_factor)
+            motif_contrib.append(response)
+
+        all_motifs_contrib = torch.stack(motif_contrib).prod(dim=0)
         log_likelihood = dmp_sample_log_likelihood(
             samples=X,
             initial_dist=self.initial_dist,
             transition_matrix=self.transition_matrix,
         )
         is_feasible = log_likelihood > -float("inf")
-        return torch.where(is_feasible, all_motifs_present, -float("inf"))
+        return torch.where(is_feasible, all_motifs_contrib, -float("inf"))
 
     def initial_solution(self, n: int = 1):
         # reset generator seed so initial solution is always the same
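
The net effect of this hunk: `evaluate_true` previously returned the product of the raw motif-presence scores (`all_motifs_present`); it now passes each score through `_cubic_response` (added at the bottom of the file, next hunk) before taking the product, so `epistasis_factor` controls how strongly partial motif matches are discounted. A numerical sketch follows the diff below.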
@@ -168,3 +172,8 @@ def __repr__(self):
             f"negate={self.negate}, "
             f"random_seed={self._random_seed})"
         )
+
+
+def _cubic_response(X: torch.Tensor, epistasis_factor: float):
+    coeff = epistasis_factor * X * (X - 1.0) + 1.0
+    return coeff * X
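
To see what the new response does, expand the polynomial: _cubic_response(x, eps) = (eps * x * (x - 1) + 1) * x = x + eps * x^2 * (x - 1). The endpoints are fixed for any eps (f(0) = 0 and f(1) = 1), so a fully matched motif still contributes exactly 1 and a fully absent one 0; only intermediate scores are warped, and for 0 < eps <= 4 the coefficient stays in [1 - eps/4, 1], so partial matches are discounted but never negative. A minimal sketch (assuming, as the quantized "present" search mode suggests, that per-motif scores lie in [0, 1]):

    import torch

    def _cubic_response(X: torch.Tensor, epistasis_factor: float):
        # copied from the diff above
        coeff = epistasis_factor * X * (X - 1.0) + 1.0
        return coeff * X

    x = torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0])  # fraction of a motif matched
    print(_cubic_response(x, 0.0))  # identity:   tensor([0.0000, 0.2500, 0.5000, 0.7500, 1.0000])
    print(_cubic_response(x, 2.0))  # discounted: tensor([0.0000, 0.1562, 0.2500, 0.4688, 1.0000])

Because `evaluate_true` multiplies the per-motif responses together, the discount compounds across motifs, sharpening the landscape around solutions that fully satisfy every motif.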
