Remove ChunkedToXContentBuilder (elastic#119310)
Reverts the introduction of ChunkedToXContentBuilder to fix the various performance regressions it introduced; the builder approach cannot, even in theory, be made to perform as well as the iterator-based solution.
Apart from a few minor adjustments that came out of changes already made on top of the builder migration, this simply returns to the previous implementations (some of which could now be written more cleanly with the utilities available today).
I also verified that this resolves the performance issues we have been running into with the builder.

closes elastic#118647

This reverts commit 918a9cc
This reverts commit 8c37875
This reverts commit 11c2eb2
This reverts commit c311515
original-brownbear authored Dec 27, 2024
1 parent ec35dc2 commit c0553d4
Showing 64 changed files with 787 additions and 1,100 deletions.
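
For context, the iterator-based pattern this commit restores models a chunked response as a lazy Iterator of small ToXContent chunks that the REST layer serializes one at a time, instead of routing everything through a builder object. The sketch below illustrates the composition style under simplified assumptions: Builder, Chunk, single, map, and concat are hypothetical stand-ins for Elasticsearch's XContentBuilder, ToXContent, Iterators.single, an Iterators.map-style helper, and Iterators.concat; this is not code from this commit.

    import java.util.Iterator;
    import java.util.List;
    import java.util.function.Function;
    import java.util.stream.Stream;
    import java.util.stream.StreamSupport;

    // Simplified stand-ins for XContentBuilder/ToXContent (the real interfaces
    // take Params and throw IOException; both are omitted here for brevity).
    interface Builder {
        Builder startObject();

        Builder field(String name, Object value);

        Builder endObject();
    }

    @FunctionalInterface
    interface Chunk {
        Builder render(Builder builder);
    }

    final class ChunkedIteratorSketch {

        // A response is an Iterator of chunks built by concatenating small
        // iterators. Nothing is rendered until the caller pulls a chunk, so a
        // large response never has to be materialized in one piece.
        static Iterator<Chunk> toXContentChunked(List<String> nodeIds) {
            return concat(
                single((Chunk) b -> b.startObject()),
                map(nodeIds.iterator(), id -> (Chunk) b -> b.field("node_id", id)),
                single((Chunk) b -> b.endObject())
            );
        }

        // One-element iterator, mirroring Iterators.single.
        static <T> Iterator<T> single(T value) {
            return List.of(value).iterator();
        }

        // Lazily applies fn to each element of it.
        static <A, B> Iterator<B> map(Iterator<A> it, Function<A, B> fn) {
            return StreamSupport.stream(((Iterable<A>) () -> it).spliterator(), false).map(fn).iterator();
        }

        // Chains several iterators into one, mirroring Iterators.concat.
        @SafeVarargs
        static <T> Iterator<T> concat(Iterator<T>... parts) {
            return Stream.of(parts)
                .flatMap(p -> StreamSupport.stream(((Iterable<T>) () -> p).spliterator(), false))
                .iterator();
        }
    }

Because concat just chains iterators, adding or skipping an optional section (compare the singleChunkIfPresent helper in the NodeStats diff below) costs at most one extra iterator, with no intermediate builder state to allocate or traverse.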
6 changes: 6 additions & 0 deletions docs/changelog/119310.yaml
@@ -0,0 +1,6 @@
+pr: 119310
+summary: Remove ChunkedToXContentBuilder
+area: "Network"
+type: bug
+issues:
+ - 118647

@@ -15,9 +15,10 @@
 import org.elasticsearch.cluster.DiffableUtils;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.Metadata;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.ingest.geoip.direct.DatabaseConfigurationMetadata;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ParseField;
@@ -90,8 +91,8 @@ public static IngestGeoIpMetadata fromXContent(XContentParser parser) throws IOException {
     }
 
     @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return ChunkedToXContent.builder(params).xContentObjectFields(DATABASES_FIELD.getPreferredName(), databases);
+    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params ignored) {
+        return Iterators.concat(ChunkedToXContentHelper.xContentValuesMap(DATABASES_FIELD.getPreferredName(), databases));
     }
 
     @Override

@@ -15,9 +15,11 @@
 import org.elasticsearch.cluster.node.DiscoveryNodeRole;
 import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings;
 import org.elasticsearch.cluster.routing.allocation.NodeAllocationStats;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.discovery.DiscoveryStats;
 import org.elasticsearch.http.HttpStats;
@@ -38,10 +40,13 @@
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Objects;
 
+import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk;
+
 /**
  * Node statistics (dynamic, changes depending on when created).
  */
@@ -342,7 +347,7 @@ public void writeTo(StreamOutput out) throws IOException {
 
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
-        return ChunkedToXContent.builder(outerParams).append((builder, params) -> {
+        return Iterators.concat(singleChunk((builder, params) -> {
             builder.field("name", getNode().getName());
             builder.field("transport_address", getNode().getAddress().toString());
             builder.field("host", getNode().getHostName());
@@ -353,38 +358,47 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
                 builder.value(role.roleName());
             }
             builder.endArray();
 
             if (getNode().getAttributes().isEmpty() == false) {
                 builder.startObject("attributes");
                 for (Map.Entry<String, String> attrEntry : getNode().getAttributes().entrySet()) {
                     builder.field(attrEntry.getKey(), attrEntry.getValue());
                 }
                 builder.endObject();
             }
 
             return builder;
-        })
-
-            .appendIfPresent(getIndices())
-            .append((builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p))
-
-            .appendIfPresent(getThreadPool())
-            .appendIfPresent(getFs())
-            .appendIfPresent(getTransport())
-            .appendIfPresent(getHttp())
-            .appendIfPresent(getBreaker())
-            .appendIfPresent(getScriptStats())
-            .appendIfPresent(getDiscoveryStats())
-            .appendIfPresent(getIngestStats())
-            .appendIfPresent(getAdaptiveSelectionStats())
-            .appendIfPresent(getScriptCacheStats())
-            .append(
+        }),
+            ifPresent(getIndices()).toXContentChunked(outerParams),
+            singleChunk(
+                (builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p)
+            ),
+            ifPresent(getThreadPool()).toXContentChunked(outerParams),
+            singleChunkIfPresent(getFs()),
+            ifPresent(getTransport()).toXContentChunked(outerParams),
+            ifPresent(getHttp()).toXContentChunked(outerParams),
+            singleChunkIfPresent(getBreaker()),
+            ifPresent(getScriptStats()).toXContentChunked(outerParams),
+            singleChunkIfPresent(getDiscoveryStats()),
+            ifPresent(getIngestStats()).toXContentChunked(outerParams),
+            singleChunkIfPresent(getAdaptiveSelectionStats()),
+            singleChunkIfPresent(getScriptCacheStats()),
+            singleChunk(
                 (builder, p) -> builder.value(ifPresent(getIndexingPressureStats()), p)
                     .value(ifPresent(getRepositoriesStats()), p)
                     .value(ifPresent(getNodeAllocationStats()), p)
-            );
+            )
+        );
     }
 
+    private static ChunkedToXContent ifPresent(@Nullable ChunkedToXContent chunkedToXContent) {
+        return Objects.requireNonNullElse(chunkedToXContent, ChunkedToXContent.EMPTY);
+    }
+
     private static ToXContent ifPresent(@Nullable ToXContent toXContent) {
         return Objects.requireNonNullElse(toXContent, ToXContent.EMPTY);
     }
+
+    private static Iterator<ToXContent> singleChunkIfPresent(ToXContent toXContent) {
+        return toXContent == null ? Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(toXContent);
+    }
 }

@@ -14,9 +14,10 @@
 import org.elasticsearch.action.support.nodes.BaseNodesXContentResponse;
 import org.elasticsearch.cluster.ClusterName;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.io.IOException;
@@ -41,12 +42,15 @@ protected void writeNodesTo(StreamOutput out, List<NodeStats> nodes) throws IOException {
 
     @Override
     protected Iterator<? extends ToXContent> xContentChunks(ToXContent.Params outerParams) {
-        return ChunkedToXContent.builder(outerParams)
-            .object(
-                "nodes",
-                getNodes().iterator(),
-                (b, ns) -> b.object(ns.getNode().getId(), ob -> ob.field("timestamp", ns.getTimestamp()).append(ns))
-            );
+        return Iterators.concat(
+            ChunkedToXContentHelper.startObject("nodes"),
+            Iterators.flatMap(getNodes().iterator(), nodeStats -> Iterators.concat(Iterators.single((builder, params) -> {
+                builder.startObject(nodeStats.getNode().getId());
+                builder.field("timestamp", nodeStats.getTimestamp());
+                return builder;
+            }), nodeStats.toXContentChunked(outerParams), ChunkedToXContentHelper.endObject())),
+            ChunkedToXContentHelper.endObject()
+        );
     }
 
     @Override

@@ -13,18 +13,20 @@
 import org.elasticsearch.action.support.master.IsAcknowledgedSupplier;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.RestApiVersion;
 import org.elasticsearch.core.UpdateForV10;
 import org.elasticsearch.rest.action.search.RestSearchAction;
 import org.elasticsearch.xcontent.ToXContent;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.Objects;
@@ -92,14 +94,17 @@ public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params outerParams) {
         if (emitState(outerParams)) {
             deprecationLogger.critical(DeprecationCategory.API, "reroute_cluster_state", STATE_FIELD_DEPRECATION_MESSAGE);
         }
-        return ChunkedToXContent.builder(outerParams).object(b -> {
-            b.field(ACKNOWLEDGED_KEY, isAcknowledged());
-            if (emitState(outerParams)) {
-                b.xContentObject("state", state);
-            }
-            if (outerParams.paramAsBoolean("explain", false)) {
-                b.append(explanations);
-            }
-        });
+        return Iterators.concat(
+            Iterators.single((builder, params) -> builder.startObject().field(ACKNOWLEDGED_KEY, isAcknowledged())),
+            emitState(outerParams)
+                ? ChunkedToXContentHelper.wrapWithObject("state", state.toXContentChunked(outerParams))
+                : Collections.emptyIterator(),
+            Iterators.single((builder, params) -> {
+                if (params.paramAsBoolean("explain", false)) {
+                    explanations.toXContent(builder, params);
+                }
+                return builder.endObject();
+            })
+        );
     }
 }

@@ -14,7 +14,6 @@
 import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.xcontent.ToXContent;
@@ -158,13 +157,14 @@
 
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return ChunkedToXContent.builder(params).object(ob -> ob.append((b, p) -> {
-            b.field(ERRORS, hasFailures());
-            b.field(TOOK, tookInMillis);
+        return Iterators.concat(Iterators.single((builder, p) -> {
+            builder.startObject();
+            builder.field(ERRORS, hasFailures());
+            builder.field(TOOK, tookInMillis);
             if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) {
-                b.field(INGEST_TOOK, ingestTookInMillis);
+                builder.field(INGEST_TOOK, ingestTookInMillis);
             }
-            return b;
-        }).array(ITEMS, Iterators.forArray(responses)));
+            return builder.startArray(ITEMS);
+        }), Iterators.forArray(responses), Iterators.<ToXContent>single((builder, p) -> builder.endArray().endObject()));
     }
 }

@@ -14,12 +14,13 @@
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.RefCounted;
@@ -382,17 +383,24 @@ public Clusters getClusters() {
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
         assert hasReferences();
-        return ChunkedToXContent.builder(params).xContentObject(innerToXContentChunked(params));
+        return Iterators.concat(
+            ChunkedToXContentHelper.startObject(),
+            this.innerToXContentChunked(params),
+            ChunkedToXContentHelper.endObject()
+        );
     }
 
     public Iterator<? extends ToXContent> innerToXContentChunked(ToXContent.Params params) {
-        return ChunkedToXContent.builder(params)
-            .append(SearchResponse.this::headerToXContent)
-            .append(clusters)
-            .append(hits)
-            .appendIfPresent(aggregations)
-            .appendIfPresent(suggest)
-            .appendIfPresent(profileResults);
+        return Iterators.concat(
+            ChunkedToXContentHelper.singleChunk(SearchResponse.this::headerToXContent),
+            Iterators.single(clusters),
+            Iterators.concat(
+                hits.toXContentChunked(params),
+                aggregations == null ? Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(aggregations),
+                suggest == null ? Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(suggest),
+                profileResults == null ? Collections.emptyIterator() : ChunkedToXContentHelper.singleChunk(profileResults)
+            )
+        );
     }
 
     public XContentBuilder headerToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {

@@ -9,8 +9,9 @@
 package org.elasticsearch.action.support.broadcast;
 
 import org.elasticsearch.action.support.DefaultShardOperationFailedException;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.rest.action.RestActions;
 import org.elasticsearch.xcontent.ToXContent;
@@ -35,8 +36,11 @@ public ChunkedBroadcastResponse(
 
     @Override
     public final Iterator<ToXContent> toXContentChunked(ToXContent.Params params) {
-        return ChunkedToXContent.builder(params)
-            .object(ob -> ob.append((b, p) -> RestActions.buildBroadcastShardsHeader(b, p, this)).append(this::customXContentChunks));
+        return Iterators.concat(Iterators.single((b, p) -> {
+            b.startObject();
+            RestActions.buildBroadcastShardsHeader(b, p, this);
+            return b;
+        }), customXContentChunks(params), ChunkedToXContentHelper.endObject());
    }
 
     protected abstract Iterator<ToXContent> customXContentChunks(ToXContent.Params params);

@@ -11,7 +11,8 @@
 
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.collect.Iterators;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.rest.action.RestActions;
 import org.elasticsearch.xcontent.ToXContent;
@@ -29,12 +30,11 @@ protected BaseNodesXContentResponse(ClusterName clusterName, List<TNodeResponse>
 
     @Override
     public final Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return ChunkedToXContent.builder(params)
-            .object(
-                ob -> ob.append((b, p) -> RestActions.buildNodesHeader(b, p, this))
-                    .field("cluster_name", getClusterName().value())
-                    .append(xContentChunks(params))
-            );
+        return Iterators.concat(Iterators.single((b, p) -> {
+            b.startObject();
+            RestActions.buildNodesHeader(b, p, this);
+            return b.field("cluster_name", getClusterName().value());
+        }), xContentChunks(params), ChunkedToXContentHelper.endObject());
     }
 
     protected abstract Iterator<? extends ToXContent> xContentChunks(ToXContent.Params outerParams);

@@ -11,9 +11,10 @@
 
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.collect.Iterators;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.xcontent.ChunkedToXContent;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
 import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
 import org.elasticsearch.features.NodeFeature;
 import org.elasticsearch.xcontent.ToXContent;
@@ -278,12 +279,15 @@ public ClusterFeatures apply(ClusterFeatures part) {
 
     @Override
     public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        return ChunkedToXContent.builder(params)
-            .array(nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).iterator(), e -> (builder, p) -> {
+        return Iterators.concat(
+            ChunkedToXContentHelper.startArray(),
+            nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).<ToXContent>map(e -> (builder, p) -> {
                 String[] features = e.getValue().toArray(String[]::new);
                 Arrays.sort(features);
                 return builder.startObject().field("node_id", e.getKey()).array("features", features).endObject();
-            });
+            }).iterator(),
+            ChunkedToXContentHelper.endArray()
+        );
     }
 
     @Override