Remove historical features infrastructure (elastic#117043)
v9 can only talk to 8.18, and historical features are declared for versions no later than 8.12, so every node a v9 cluster can contain already has all of them. We can therefore remove all historical features and their supporting infrastructure.
thecoop authored Nov 24, 2024
1 parent 5f3b380 commit e701697
Showing 19 changed files with 62 additions and 337 deletions.
@@ -119,7 +119,7 @@ class BuildPluginFuncTest extends AbstractGradleFuncTest {
noticeFile.set(file("NOTICE"))
"""
when:
def result = gradleRunner("assemble", "-x", "generateHistoricalFeaturesMetadata").build()
def result = gradleRunner("assemble", "-x", "generateClusterFeaturesMetadata").build()
then:
result.task(":assemble").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world.jar").exists()
@@ -303,7 +303,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
"""

when:
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateHistoricalFeaturesMetadata').build()
def result = gradleRunner('assemble', '--stacktrace', '-x', 'generateClusterFeaturesMetadata').build()

then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
@@ -14,7 +14,7 @@
import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.elasticsearch.gradle.internal.info.BuildParameterExtension;
import org.elasticsearch.gradle.internal.precommit.JarHellPrecommitPlugin;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
import org.elasticsearch.gradle.testclusters.ElasticsearchCluster;
@@ -38,7 +38,7 @@ public void apply(Project project) {
project.getPluginManager().apply(PluginBuildPlugin.class);
project.getPluginManager().apply(JarHellPrecommitPlugin.class);
project.getPluginManager().apply(ElasticsearchJavaPlugin.class);
project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
boolean isCi = project.getRootProject().getExtensions().getByType(BuildParameterExtension.class).isCi();
// Clear default dependencies added by public PluginBuildPlugin as we add our
// own project dependencies for internal builds
@@ -12,7 +12,7 @@
import org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.internal.precommit.InternalPrecommitTasks;
import org.elasticsearch.gradle.internal.snyk.SnykDependencyMonitoringGradlePlugin;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@@ -63,7 +63,7 @@ public void apply(final Project project) {
project.getPluginManager().apply(ElasticsearchJavadocPlugin.class);
project.getPluginManager().apply(DependenciesInfoPlugin.class);
project.getPluginManager().apply(SnykDependencyMonitoringGradlePlugin.class);
project.getPluginManager().apply(HistoricalFeaturesMetadataPlugin.class);
project.getPluginManager().apply(ClusterFeaturesMetadataPlugin.class);
InternalPrecommitTasks.create(project, true);
configureLicenseAndNotice(project);
}
@@ -21,10 +21,10 @@
import java.util.Map;

/**
* Extracts historical feature metadata into a machine-readable format for use in backward compatibility testing.
* Extracts cluster feature metadata into a machine-readable format for use in backward compatibility testing.
*/
public class HistoricalFeaturesMetadataPlugin implements Plugin<Project> {
public static final String HISTORICAL_FEATURES_JSON = "historical-features.json";
public class ClusterFeaturesMetadataPlugin implements Plugin<Project> {
public static final String CLUSTER_FEATURES_JSON = "cluster-features.json";
public static final String FEATURES_METADATA_TYPE = "features-metadata-json";
public static final String FEATURES_METADATA_CONFIGURATION = "featuresMetadata";

@@ -40,13 +40,13 @@ public void apply(Project project) {
SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class);
SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME);

TaskProvider<HistoricalFeaturesMetadataTask> generateTask = project.getTasks()
.register("generateHistoricalFeaturesMetadata", HistoricalFeaturesMetadataTask.class, task -> {
TaskProvider<ClusterFeaturesMetadataTask> generateTask = project.getTasks()
.register("generateClusterFeaturesMetadata", ClusterFeaturesMetadataTask.class, task -> {
task.setClasspath(
featureMetadataExtractorConfig.plus(mainSourceSet.getRuntimeClasspath())
.plus(project.getConfigurations().getByName(CompileOnlyResolvePlugin.RESOLVEABLE_COMPILE_ONLY_CONFIGURATION_NAME))
);
task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(HISTORICAL_FEATURES_JSON));
task.getOutputFile().convention(project.getLayout().getBuildDirectory().file(CLUSTER_FEATURES_JSON));
});

Configuration featuresMetadataArtifactConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
@@ -26,7 +26,7 @@
import javax.inject.Inject;

@CacheableTask
public abstract class HistoricalFeaturesMetadataTask extends DefaultTask {
public abstract class ClusterFeaturesMetadataTask extends DefaultTask {
private FileCollection classpath;

@OutputFile
@@ -46,30 +46,30 @@ public void setClasspath(FileCollection classpath) {

@TaskAction
public void execute() {
getWorkerExecutor().noIsolation().submit(HistoricalFeaturesMetadataWorkAction.class, params -> {
getWorkerExecutor().noIsolation().submit(ClusterFeaturesMetadataWorkAction.class, params -> {
params.getClasspath().setFrom(getClasspath());
params.getOutputFile().set(getOutputFile());
});
}

public interface HistoricalFeaturesWorkParameters extends WorkParameters {
public interface ClusterFeaturesWorkParameters extends WorkParameters {
ConfigurableFileCollection getClasspath();

RegularFileProperty getOutputFile();
}

public abstract static class HistoricalFeaturesMetadataWorkAction implements WorkAction<HistoricalFeaturesWorkParameters> {
public abstract static class ClusterFeaturesMetadataWorkAction implements WorkAction<ClusterFeaturesWorkParameters> {
private final ExecOperations execOperations;

@Inject
public HistoricalFeaturesMetadataWorkAction(ExecOperations execOperations) {
public ClusterFeaturesMetadataWorkAction(ExecOperations execOperations) {
this.execOperations = execOperations;
}

@Override
public void execute() {
LoggedExec.javaexec(execOperations, spec -> {
spec.getMainClass().set("org.elasticsearch.extractor.features.HistoricalFeaturesMetadataExtractor");
spec.getMainClass().set("org.elasticsearch.extractor.features.ClusterFeaturesMetadataExtractor");
spec.classpath(getParameters().getClasspath());
spec.args(getParameters().getOutputFile().get().getAsFile().getAbsolutePath());
});
@@ -20,8 +20,8 @@
import org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes;
import org.elasticsearch.gradle.internal.ElasticsearchJavaBasePlugin;
import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin;
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin;
import org.elasticsearch.gradle.internal.test.ErrorReportingTestListener;
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin;
import org.elasticsearch.gradle.plugin.BasePluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginBuildPlugin;
import org.elasticsearch.gradle.plugin.PluginPropertiesExtension;
@@ -116,12 +116,12 @@ public void apply(Project project) {
extractedPluginsConfiguration.extendsFrom(pluginsConfiguration);
configureArtifactTransforms(project);

// Create configuration for aggregating historical feature metadata
// Create configuration for aggregating feature metadata
FileCollection featureMetadataConfig = project.getConfigurations().create(FEATURES_METADATA_CONFIGURATION, c -> {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(d -> d.add(project.getDependencies().project(Map.of("path", ":server"))));
c.withDependencies(dependencies -> {
@@ -136,10 +136,7 @@ public void apply(Project project) {
c.setCanBeConsumed(false);
c.setCanBeResolved(true);
c.attributes(
a -> a.attribute(
ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE,
HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE
)
a -> a.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
);
c.defaultDependencies(
d -> d.add(project.getDependencies().project(Map.of("path", ":distribution", "configuration", "featuresMetadata")))
4 changes: 2 additions & 2 deletions distribution/build.gradle
@@ -14,7 +14,7 @@ import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.ConcatFilesTask
import org.elasticsearch.gradle.internal.DependenciesInfoPlugin
import org.elasticsearch.gradle.internal.NoticeTask
import org.elasticsearch.gradle.internal.test.HistoricalFeaturesMetadataPlugin
import org.elasticsearch.gradle.internal.test.ClusterFeaturesMetadataPlugin

import java.nio.file.Files
import java.nio.file.Path
@@ -33,7 +33,7 @@ configurations {
}
featuresMetadata {
attributes {
attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, HistoricalFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ClusterFeaturesMetadataPlugin.FEATURES_METADATA_TYPE)
}
}
}
@@ -95,7 +95,7 @@ public Set<String> allNodeFeatures() {
/**
* {@code true} if {@code feature} is present on all nodes in the cluster.
* <p>
* NOTE: This should not be used directly, as it does not read historical features.
* NOTE: This should not be used directly.
* Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead.
*/
@SuppressForbidden(reason = "directly reading cluster features")
69 changes: 4 additions & 65 deletions server/src/main/java/org/elasticsearch/features/FeatureData.java
@@ -9,25 +9,19 @@

package org.elasticsearch.features;

import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;

import static org.elasticsearch.features.FeatureService.CLUSTER_FEATURES_ADDED_VERSION;

/**
* Reads and consolidate features exposed by a list {@link FeatureSpecification}, grouping them into historical features and node
* features for the consumption of {@link FeatureService}
* Reads and consolidates features exposed by a list of {@link FeatureSpecification}s,
* grouping them together for consumption by {@link FeatureService}
*/
public class FeatureData {

@@ -40,19 +34,14 @@ public class FeatureData {
}
}

private final NavigableMap<Version, Set<String>> historicalFeatures;
private final Map<String, NodeFeature> nodeFeatures;

private FeatureData(NavigableMap<Version, Set<String>> historicalFeatures, Map<String, NodeFeature> nodeFeatures) {
this.historicalFeatures = historicalFeatures;
private FeatureData(Map<String, NodeFeature> nodeFeatures) {
this.nodeFeatures = nodeFeatures;
}

public static FeatureData createFromSpecifications(List<? extends FeatureSpecification> specs) {
Map<String, FeatureSpecification> allFeatures = new HashMap<>();

// Initialize historicalFeatures with empty version to guarantee there's a floor entry for every version
NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>(Map.of(Version.V_EMPTY, Set.of()));
Map<String, NodeFeature> nodeFeatures = new HashMap<>();
for (FeatureSpecification spec : specs) {
Set<NodeFeature> specFeatures = spec.getFeatures();
@@ -61,39 +50,6 @@ public static FeatureData createFromSpecifications(List<? extends FeatureSpecifi
specFeatures.addAll(spec.getTestFeatures());
}

for (var hfe : spec.getHistoricalFeatures().entrySet()) {
FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec);
// the same SPI class can be loaded multiple times if it's in the base classloader
if (existing != null && existing.getClass() != spec.getClass()) {
throw new IllegalArgumentException(
Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", hfe.getKey().id(), existing, spec)
);
}

if (hfe.getValue().after(CLUSTER_FEATURES_ADDED_VERSION)) {
throw new IllegalArgumentException(
Strings.format(
"Historical feature [%s] declared by [%s] for version [%s] is not a historical version",
hfe.getKey().id(),
spec,
hfe.getValue()
)
);
}

if (specFeatures.contains(hfe.getKey())) {
throw new IllegalArgumentException(
Strings.format(
"Feature [%s] cannot be declared as both a regular and historical feature by [%s]",
hfe.getKey().id(),
spec
)
);
}

historicalFeatures.computeIfAbsent(hfe.getValue(), k -> new HashSet<>()).add(hfe.getKey().id());
}

for (NodeFeature f : specFeatures) {
FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec);
if (existing != null && existing.getClass() != spec.getClass()) {
@@ -106,24 +62,7 @@ public static FeatureData createFromSpecifications(List<? extends FeatureSpecifi
}
}

return new FeatureData(consolidateHistoricalFeatures(historicalFeatures), Map.copyOf(nodeFeatures));
}

private static NavigableMap<Version, Set<String>> consolidateHistoricalFeatures(
NavigableMap<Version, Set<String>> declaredHistoricalFeatures
) {
// update each version by adding in all features from previous versions
Set<String> featureAggregator = new HashSet<>();
for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) {
featureAggregator.addAll(versions.getValue());
versions.setValue(Set.copyOf(featureAggregator));
}

return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures);
}

public NavigableMap<Version, Set<String>> getHistoricalFeatures() {
return historicalFeatures;
return new FeatureData(Map.copyOf(nodeFeatures));
}

public Map<String, NodeFeature> getNodeFeatures() {
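With the historical-feature consolidation removed, FeatureData's job reduces to deduplicating node features declared across specifications. A minimal sketch of the remaining flow, assuming a hypothetical specification class (not part of this commit):

    import java.util.List;
    import java.util.Map;

    import org.elasticsearch.features.FeatureData;
    import org.elasticsearch.features.FeatureSpecification;
    import org.elasticsearch.features.NodeFeature;

    class FeatureDataSketch {
        // Consolidate features from SPI-provided specifications; a duplicate feature id
        // declared by two different specification classes still throws IllegalArgumentException.
        static Map<String, NodeFeature> consolidate(List<? extends FeatureSpecification> specs) {
            FeatureData featureData = FeatureData.createFromSpecifications(specs);
            return featureData.getNodeFeatures(); // feature-id -> declaring NodeFeature
        }
    }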
@@ -9,16 +9,13 @@

package org.elasticsearch.features;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.logging.LogManager;
import org.elasticsearch.logging.Logger;

import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;

/**
* Manages information on the features supported by nodes in the cluster.
@@ -34,9 +31,6 @@ public class FeatureService {

private static final Logger logger = LogManager.getLogger(FeatureService.class);

public static final Version CLUSTER_FEATURES_ADDED_VERSION = Version.V_8_12_0;

private final NavigableMap<Version, Set<String>> historicalFeatures;
private final Map<String, NodeFeature> nodeFeatures;

/**
@@ -47,13 +41,12 @@ public FeatureService(List<? extends FeatureSpecification> specs) {

var featureData = FeatureData.createFromSpecifications(specs);
nodeFeatures = featureData.getNodeFeatures();
historicalFeatures = featureData.getHistoricalFeatures();

logger.info("Registered local node features {}", nodeFeatures.keySet().stream().sorted().toList());
}

/**
* The non-historical features supported by this node.
* The features supported by this node.
* @return Map of {@code feature-id} to its declaring {@code NodeFeature} object.
*/
public Map<String, NodeFeature> getNodeFeatures() {
@@ -65,11 +58,6 @@ public Map<String, NodeFeature> getNodeFeatures() {
*/
@SuppressForbidden(reason = "We need basic feature information from cluster state")
public boolean clusterHasFeature(ClusterState state, NodeFeature feature) {
if (state.clusterFeatures().clusterHasFeature(feature)) {
return true;
}

var features = historicalFeatures.floorEntry(state.getNodes().getMinNodeVersion());
return features != null && features.getValue().contains(feature.id());
return state.clusterFeatures().clusterHasFeature(feature);
}
}
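After this change a cluster-wide feature check is a straight delegation to the cluster state, with no version-based fallback. A usage sketch — the feature id below is hypothetical:

    import java.util.List;

    import org.elasticsearch.cluster.ClusterState;
    import org.elasticsearch.features.FeatureService;
    import org.elasticsearch.features.FeatureSpecification;
    import org.elasticsearch.features.NodeFeature;

    class FeatureCheckSketch {
        private static final NodeFeature SOME_FEATURE = new NodeFeature("my_plugin.some_feature"); // hypothetical id

        // True only when every node currently in the cluster has published the feature.
        static boolean isSupported(List<? extends FeatureSpecification> specs, ClusterState state) {
            FeatureService featureService = new FeatureService(specs);
            return featureService.clusterHasFeature(state, SOME_FEATURE);
        }
    }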
@@ -9,9 +9,6 @@

package org.elasticsearch.features;

import org.elasticsearch.Version;

import java.util.Map;
import java.util.Set;

/**
@@ -49,12 +46,4 @@ default Set<NodeFeature> getFeatures() {
default Set<NodeFeature> getTestFeatures() {
return Set.of();
}

/**
* Returns information on historical features that should be deemed to be present on all nodes
* on or above the {@link Version} specified.
*/
default Map<NodeFeature, Version> getHistoricalFeatures() {
return Map.of();
}
}
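For reference, a specification now only declares regular (and optionally test) features; there is no historical map left to override. A hypothetical implementation after this commit:

    import java.util.Set;

    import org.elasticsearch.features.FeatureSpecification;
    import org.elasticsearch.features.NodeFeature;

    // Hypothetical example; in a real plugin this would be registered via SPI
    // (META-INF/services/org.elasticsearch.features.FeatureSpecification).
    public class MyFeatureSpecification implements FeatureSpecification {
        private static final NodeFeature SOME_FEATURE = new NodeFeature("my_plugin.some_feature");

        @Override
        public Set<NodeFeature> getFeatures() {
            return Set.of(SOME_FEATURE);
        }
    }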