Introduce chunked version of ToXContentObject #92549

@@ -335,7 +335,7 @@ public String toString() {
+ version
+ '\''
+ ", exception='"
+ exception.getMessage()
+ (exception == null ? "null" : exception.getMessage())
+ "'"
+ '}';
}
@@ -14,7 +14,7 @@
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.snapshots.SnapshotInfo;
import org.elasticsearch.xcontent.ConstructingObjectParser;
@@ -33,7 +33,7 @@
/**
* Get snapshots response
*/
public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXContent {
public class GetSnapshotsResponse extends ActionResponse implements ChunkedToXContentObject {

private static final int UNKNOWN_COUNT = -1;

@@ -16,7 +16,7 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
@@ -43,7 +43,7 @@
/**
* Status of a snapshot
*/
public class SnapshotStatus implements ChunkedToXContent, Writeable {
public class SnapshotStatus implements ChunkedToXContentObject, Writeable {

private final Snapshot snapshot;

@@ -12,7 +12,7 @@
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContent;
@@ -28,7 +28,7 @@
/**
* Snapshot status response
*/
public class SnapshotsStatusResponse extends ActionResponse implements ChunkedToXContent {
public class SnapshotsStatusResponse extends ActionResponse implements ChunkedToXContentObject {

private final List<SnapshotStatus> snapshots;

@@ -14,7 +14,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
@@ -28,7 +28,7 @@
import java.util.Map;
import java.util.Objects;

public class GetSettingsResponse extends ActionResponse implements ChunkedToXContent {
public class GetSettingsResponse extends ActionResponse implements ChunkedToXContentObject {

private final Map<String, Settings> indexToSettings;
private final Map<String, Settings> indexToDefaultSettings;
@@ -13,7 +13,7 @@
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.xcontent.ConstructingObjectParser;
@@ -34,7 +34,7 @@
/**
* Response for {@link FieldCapabilitiesRequest} requests.
*/
public class FieldCapabilitiesResponse extends ActionResponse implements ChunkedToXContent {
public class FieldCapabilitiesResponse extends ActionResponse implements ChunkedToXContentObject {
private static final ParseField INDICES_FIELD = new ParseField("indices");
private static final ParseField FIELDS_FIELD = new ParseField("fields");
private static final ParseField FAILED_INDICES_FIELD = new ParseField("failed_indices");
@@ -241,6 +241,9 @@ public int hashCode() {

@Override
public String toString() {
if (indexResponses.size() > 0) {
return "FieldCapabilitiesResponse{unmerged}";
Member Author: Not great, but we should never throw from toString, so we need some breakout here for the un-merged case, which doesn't allow toXContent.

}
return Strings.toString(this);
}
}
13 changes: 11 additions & 2 deletions server/src/main/java/org/elasticsearch/common/Strings.java
@@ -12,8 +12,10 @@
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ChunkedToXContent;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
@@ -756,7 +758,7 @@ public static String toString(ToXContent toXContent) {
*/
@Deprecated
public static String toString(ChunkedToXContent chunkedToXContent) {
return toString(ChunkedToXContent.wrapAsXContentObject(chunkedToXContent));
return toString(chunkedToXContent, false, false);
}

/**
@@ -795,7 +797,14 @@ public static String toString(ToXContent toXContent, boolean pretty, boolean hum
*/
@Deprecated
public static String toString(ChunkedToXContent chunkedToXContent, boolean pretty, boolean human) {
return toString(ChunkedToXContent.wrapAsXContentObject(chunkedToXContent), pretty, human);
final ChunkedToXContent wrapped = chunkedToXContent.isFragment()
? params -> Iterators.concat(
ChunkedToXContentHelper.startObject(),
chunkedToXContent.toXContentChunked(params),
ChunkedToXContentHelper.endObject()
)
: chunkedToXContent;
return toString(ChunkedToXContent.wrapAsXContentObject(wrapped), pretty, human);
}

/**
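For context on the change just above: the pretty/human overload of Strings.toString now wraps fragment-style ChunkedToXContent instances in a synthetic object itself, so call sites no longer have to build that wrapper by hand (the graveyard test change further down removes exactly such a call). A rough sketch of the equivalence, reusing the graveyard fragment from that test:

    // What callers had to write before (this is what the graveyard test used to do):
    String manual = Strings.toString(
        ignored -> Iterators.concat(
            ChunkedToXContentHelper.startObject(),
            graveyard.toXContentChunked(ToXContent.EMPTY_PARAMS),
            ChunkedToXContentHelper.endObject()
        ),
        false,
        true
    );

    // What the same call site can say now that the overload wraps fragments itself:
    String direct = Strings.toString(graveyard, false, true);
    // Both are expected to produce the same pretty-printed object.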
@@ -42,4 +42,8 @@ static ToXContentObject wrapAsXContentObject(ChunkedToXContent chunkedToXContent
return builder;
};
}

default boolean isFragment() {
return true;
}
}
@@ -0,0 +1,16 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.xcontent;

public interface ChunkedToXContentObject extends ChunkedToXContent {

@Override
default boolean isFragment() {
return false;
}
}
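To illustrate the intended split (a hypothetical sketch, not part of this change, assuming toXContentChunked takes a ToXContent.Params and returns an Iterator<? extends ToXContent>, as the lambda usages elsewhere in this diff suggest): a fragment keeps the default isFragment() == true and relies on its caller to wrap it, whereas a ChunkedToXContentObject emits its own start/end object chunks and renders as a standalone object.

    import java.util.Iterator;
    import java.util.Map;

    import org.elasticsearch.common.collect.Iterators;
    import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
    import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
    import org.elasticsearch.xcontent.ToXContent;

    // Hypothetical example of a chunked, object-shaped response body.
    public class ExampleCountsResponse implements ChunkedToXContentObject {

        private final Map<String, Long> counts;

        public ExampleCountsResponse(Map<String, Long> counts) {
            this.counts = counts;
        }

        @Override
        public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
            // One chunk per map entry, bracketed by the object start/end chunks,
            // so a large map never has to be serialized in a single pass.
            return Iterators.concat(
                ChunkedToXContentHelper.startObject(),
                counts.entrySet().stream()
                    .map(entry -> (ToXContent) (builder, p) -> builder.field(entry.getKey(), entry.getValue()))
                    .iterator(),
                ChunkedToXContentHelper.endObject()
            );
        }
        // isFragment() already defaults to false via ChunkedToXContentObject, so the
        // serialization helpers treat instances as whole objects without extra wrapping.
    }

Because the marker interface carries isFragment(), the test base class changed at the bottom of this diff no longer needs a per-test isFragment() override to know whether to wrap an instance.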
@@ -162,4 +162,8 @@ public int hashCode() {
return Objects.hash(super.hashCode(), reduceScript, aggregations);
}

@Override
public String toString() {
return "InternalScriptedMetric{" + reduceScript + "}";
Member Author: Had to do something here, since the parent class's toString would call back into toXContent, which throws if the instance hasn't been reduced yet.

}
}
@@ -165,8 +165,9 @@ public void testFromXContent() throws IOException {
.asMatchPredicate()
.or(Pattern.compile("snapshots\\.\\d+\\.index_details").asMatchPredicate())
.or(Pattern.compile("failures\\.*").asMatchPredicate());
chunkedXContentTester(this::createParser, (XContentType t) -> createTestInstance(), params, this::doParseInstance, false)
.numberOfTestRuns(1)
chunkedXContentTester(this::createParser, (XContentType t) -> createTestInstance(), params, this::doParseInstance).numberOfTestRuns(
1
)
.supportsUnknownFields(true)
.shuffleFieldsExceptions(Strings.EMPTY_ARRAY)
.randomFieldsExcludeFilter(predicate)
@@ -44,12 +44,13 @@ private static Map<String, DiskUsage> randomDiskUsage() {
Map<String, DiskUsage> builder = new HashMap<>(numEntries);
for (int i = 0; i < numEntries; i++) {
String key = randomAlphaOfLength(32);
final int totalBytes = randomIntBetween(0, Integer.MAX_VALUE);
Member Author: Had to fix this to use plausible values; otherwise toString would run into negative byte values, which would throw an IllegalArgumentException.
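Spelling out the failure mode (a sketch, assuming DiskUsage derives its used-bytes figure as total minus free, which is what independent random values break):

    // Free bytes drawn independently of total bytes can exceed the total, so the
    // derived used-bytes value goes negative and rendering it as a byte size in
    // toString/toXContent throws an IllegalArgumentException, e.g.:
    DiskUsage broken = new DiskUsage("nodeId", "nodeName", "/data",
        /* totalBytes */ 10,
        /* freeBytes  */ 1_000_000);
    // broken.toString() would fail; constraining freeBytes to [0, totalBytes] avoids it.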

DiskUsage diskUsage = new DiskUsage(
randomAlphaOfLength(4),
randomAlphaOfLength(4),
randomAlphaOfLength(4),
randomIntBetween(0, Integer.MAX_VALUE),
randomIntBetween(0, Integer.MAX_VALUE)
totalBytes,
randomIntBetween(0, totalBytes)
);
builder.put(key, diskUsage);
}
@@ -62,9 +62,4 @@ protected DataStreamMetadata doParseInstance(XContentParser parser) throws IOExc
protected Writeable.Reader<DataStreamMetadata> instanceReader() {
return DataStreamMetadata::new;
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -65,9 +65,4 @@ private static DesiredNodesMetadata randomDesiredNodesMetadata() {
private DesiredNodesMetadata mutate(DesiredNodesMetadata base) {
return new DesiredNodesMetadata(mutateDesiredNodes(base.getLatestDesiredNodes()));
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -11,10 +11,8 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.common.xcontent.XContentElasticsearchExtension;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
@@ -70,15 +68,7 @@ public void testXContent() throws IOException {
if (graveyard.getTombstones().size() > 0) {
// check that date properly printed
assertThat(
Strings.toString(
ignored -> Iterators.concat(
ChunkedToXContentHelper.startObject(),
graveyard.toXContentChunked(ToXContent.EMPTY_PARAMS),
ChunkedToXContentHelper.endObject()
),
false,
true
),
Strings.toString(graveyard, false, true),
containsString(
XContentElasticsearchExtension.DEFAULT_FORMATTER.format(
Instant.ofEpochMilli(graveyard.getTombstones().get(0).getDeleteDateInMillis())
@@ -142,9 +142,4 @@ protected Metadata.Custom makeTestChanges(Metadata.Custom testInstance) {
protected Metadata.Custom mutateInstance(Metadata.Custom instance) throws IOException {
return makeTestChanges(instance);
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -396,9 +396,4 @@ private Assignment randomAssignment() {
}
return new Assignment(randomAlphaOfLength(10), randomAlphaOfLength(10));
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -170,9 +170,4 @@ protected ScriptMetadata doParseInstance(XContentParser parser) {
throw new UncheckedIOException(ioe);
}
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -27,8 +27,6 @@
import java.util.function.Predicate;

import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.hamcrest.Matchers.emptyString;
import static org.hamcrest.Matchers.not;

public class AggregationProfileShardResultTests extends AbstractXContentSerializingTestCase<AggregationProfileShardResult> {

@@ -120,10 +118,4 @@ public void testToXContent() throws IOException {
}"""), xContent.utf8ToString());
}

public void testToString() {
Member Author: Moved to the parent now, to deal with this for all the things we serialize.

final String toString = createTestInstance().toString();
assertNotNull(toString);
assertThat(toString, not(emptyString()));
}

}
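For reference, the removed assertions were the whole test; the parent-class replacement presumably looks much like this sketch (inferred from the removed body and from the testToString() override added further down, not copied from the actual parent):

    public void testToString() throws Exception {
        final String toString = createTestInstance().toString();
        assertNotNull(toString);
        assertThat(toString, not(emptyString()));
    }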
@@ -242,4 +242,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
}
});
}

@Override
public void testToString() throws Exception {
// override parent since empty sort value serializes to empty string
assertNotNull(createTestInstance().toString());
}
}
@@ -116,9 +116,4 @@ protected Custom doParseInstance(XContentParser parser) throws IOException {
repos.sort(Comparator.comparing(RepositoryMetadata::name));
return new RepositoriesMetadata(repos);
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -72,9 +72,4 @@ protected Metadata.Custom makeTestChanges(Metadata.Custom testInstance) {
protected Writeable.Reader<Diff<Metadata.Custom>> diffReader() {
return FeatureMigrationResults.ResultsDiff::new;
}

@Override
protected boolean isFragment() {
return true;
}
}
@@ -23,18 +23,12 @@ public abstract class AbstractChunkedSerializingTestCase<T extends ChunkedToXCon

@Override
protected AbstractXContentTestCase.XContentTester<T> createXContentTester() {
return chunkedXContentTester(
this::createParser,
this::createXContextTestInstance,
getToXContentParams(),
this::doParseInstance,
isFragment()
);
return chunkedXContentTester(this::createParser, this::createXContextTestInstance, getToXContentParams(), this::doParseInstance);
}

@Override
protected ToXContent asXContent(T instance) {
if (isFragment()) {
if (instance.isFragment()) {
return (ToXContentObject) ((builder, params) -> {
builder.startObject();
ChunkedToXContent.wrapAsXContentObject(instance).toXContent(builder, params);
@@ -53,8 +47,4 @@ protected T createXContextTestInstance(XContentType xContentType) {
* Parses to a new instance using the provided {@link XContentParser}
*/
protected abstract T doParseInstance(XContentParser parser) throws IOException;

protected boolean isFragment() {
return false;
}
}