failures = new ArrayList<>();
+
+ String currentFieldName = null;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (SearchResponse.Cluster.INDICES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ indexExpression = parser.text();
+ } else if (SearchResponse.Cluster.STATUS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ status = parser.text();
+ } else if (SearchResponse.TIMED_OUT.match(currentFieldName, parser.getDeprecationHandler())) {
+ timedOut = parser.booleanValue();
+ } else if (SearchResponse.TOOK.match(currentFieldName, parser.getDeprecationHandler())) {
+ took = parser.longValue();
+ } else {
+ parser.skipChildren();
+ }
+ } else if (RestActions._SHARDS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ if (token == Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (RestActions.FAILED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ failedShards = parser.intValue();
+ } else if (RestActions.SUCCESSFUL_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ successfulShards = parser.intValue();
+ } else if (RestActions.TOTAL_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ totalShards = parser.intValue();
+ } else if (RestActions.SKIPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ skippedShards = parser.intValue();
+ } else {
+ parser.skipChildren();
+ }
+ } else {
+ parser.skipChildren();
+ }
+ }
+ } else if (token == Token.START_ARRAY) {
+ if (RestActions.FAILURES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ while (parser.nextToken() != Token.END_ARRAY) {
+ failures.add(ParsedShardSearchFailure.fromXContent(parser));
+ }
+ } else {
+ parser.skipChildren();
+ }
+ } else {
+ parser.skipChildren();
+ }
+ }
+
+ Integer totalShardsFinal = totalShards == -1 ? null : totalShards;
+ Integer successfulShardsFinal = successfulShards == -1 ? null : successfulShards;
+ Integer skippedShardsFinal = skippedShards == -1 ? null : skippedShards;
+ Integer failedShardsFinal = failedShards == -1 ? null : failedShards;
+ TimeValue tookTimeValue = took == -1L ? null : new TimeValue(took);
+ boolean skipUnavailable = SearchResponse.Cluster.SKIP_UNAVAILABLE_DEFAULT; // skipUnavailable is not exposed to XContent, so just use default
+
+ return new SearchResponse.Cluster(
+ clusterName,
+ indexExpression,
+ skipUnavailable,
+ SearchResponse.Cluster.Status.valueOf(status.toUpperCase(Locale.ROOT)),
+ totalShardsFinal,
+ successfulShardsFinal,
+ skippedShardsFinal,
+ failedShardsFinal,
+ failures,
+ tookTimeValue,
+ timedOut
+ );
+ }
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/search/ParsedShardSearchFailure.java b/src/main/java/org/elasticsearch/action/search/ParsedShardSearchFailure.java
new file mode 100644
index 000000000..ea3bb74a9
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/search/ParsedShardSearchFailure.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.cluster.metadata.IndexMetadata;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.transport.RemoteClusterAware;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+
+/**
+ * Represents a failure to search on a specific shard.
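+ * <p>
+ * A minimal usage sketch (illustrative; the input shape is an assumption inferred from the
+ * fields parsed below, not a documented contract):
+ * <pre>{@code
+ * // input: { "shard": 0, "index": "remote1:my-index", "node": "node-id", "reason": { ... } }
+ * ShardSearchFailure failure = ParsedShardSearchFailure.fromXContent(parser);
+ * }</pre>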
+ */
+public class ParsedShardSearchFailure {
+
+ public static ShardSearchFailure fromXContent(XContentParser parser) throws IOException {
+ XContentParser.Token token;
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
+ String currentFieldName = null;
+ int shardId = -1;
+ String indexName = null;
+ String clusterAlias = null;
+ String nodeId = null;
+ ElasticsearchException exception = null;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token.isValue()) {
+ if (ShardSearchFailure.SHARD_FIELD.equals(currentFieldName)) {
+ shardId = parser.intValue();
+ } else if (ShardSearchFailure.INDEX_FIELD.equals(currentFieldName)) {
+ String[] split = RemoteClusterAware.splitIndexName(parser.text());
+ clusterAlias = split[0];
+ indexName = split[1];
+ } else if (ShardSearchFailure.NODE_FIELD.equals(currentFieldName)) {
+ nodeId = parser.text();
+ } else {
+ parser.skipChildren();
+ }
+ } else if (token == XContentParser.Token.START_OBJECT) {
+ if (ShardSearchFailure.REASON_FIELD.equals(currentFieldName)) {
+ exception = ElasticsearchException.fromXContent(parser);
+ } else {
+ parser.skipChildren();
+ }
+ } else {
+ parser.skipChildren();
+ }
+ }
+ SearchShardTarget searchShardTarget = null;
+ if (nodeId != null) {
+ searchShardTarget = new SearchShardTarget(
+ nodeId,
+ new ShardId(new Index(indexName, IndexMetadata.INDEX_UUID_NA_VALUE), shardId),
+ clusterAlias
+ );
+ }
+ return new ShardSearchFailure(exception, searchShardTarget);
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/support/master/ParsedAcknowledgedResponse.java b/src/main/java/org/elasticsearch/action/support/master/ParsedAcknowledgedResponse.java
new file mode 100644
index 000000000..1b724e464
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/support/master/ParsedAcknowledgedResponse.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+package org.elasticsearch.action.support.master;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * A response to an action which updated the cluster state, but needs to report whether any relevant nodes failed to apply the update. For
+ * instance, a {@link org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest} may update a mapping in the index metadata, but
+ * one or more data nodes may fail to acknowledge the new mapping within the ack timeout. If this happens then clients must accept that
+ * subsequent requests that rely on the mapping update may return errors from the lagging data nodes.
+ *
+ * Actions which return a payload-free acknowledgement of success should generally prefer to use {@link ActionResponse.Empty} instead of
+ * {@link AcknowledgedResponse}, and other listeners should generally prefer {@link Void}.
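+ *
+ * <p>A minimal parsing sketch (illustrative; assumes the parser is positioned at the start of an
+ * {@code {"acknowledged": true}} body):
+ * <pre>{@code
+ * AcknowledgedResponse response = ParsedAcknowledgedResponse.fromXContent(parser);
+ * boolean acked = response.isAcknowledged();
+ * }</pre>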
+ */
+public class ParsedAcknowledgedResponse {
+
+ private static final ParseField ACKNOWLEDGED = new ParseField(AcknowledgedResponse.ACKNOWLEDGED_KEY);
+
+ public static <T> void declareAcknowledgedField(ConstructingObjectParser<T, Void> objectParser) {
+ objectParser.declareField(
+ constructorArg(),
+ (parser, context) -> parser.booleanValue(),
+ ACKNOWLEDGED,
+ ObjectParser.ValueType.BOOLEAN
+ );
+ }
+
+ /**
+ * A generic parser that simply parses the acknowledged flag
+ */
+ private static final ConstructingObjectParser<Boolean, Void> ACKNOWLEDGED_FLAG_PARSER = new ConstructingObjectParser<>(
+ "acknowledged_flag",
+ true,
+ args -> (Boolean) args[0]
+ );
+
+ static {
+ ACKNOWLEDGED_FLAG_PARSER.declareField(
+ constructorArg(),
+ (parser, context) -> parser.booleanValue(),
+ ACKNOWLEDGED,
+ ObjectParser.ValueType.BOOLEAN
+ );
+ }
+
+ public static AcknowledgedResponse fromXContent(XContentParser parser) throws IOException {
+ return AcknowledgedResponse.of(ACKNOWLEDGED_FLAG_PARSER.apply(parser, null));
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/support/master/ParsedShardsAcknowledgedResponse.java b/src/main/java/org/elasticsearch/action/support/master/ParsedShardsAcknowledgedResponse.java
new file mode 100644
index 000000000..6c58f3a16
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/support/master/ParsedShardsAcknowledgedResponse.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.action.support.master;
+
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
+
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
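+/**
+ * Declares the {@code acknowledged} and {@code shards_acknowledged} fields on a response parser.
+ * A typical (illustrative) use, assuming a hypothetical {@code MyResponse} with a
+ * {@code (boolean, boolean)} constructor:
+ * <pre>{@code
+ * ConstructingObjectParser<MyResponse, Void> parser = new ConstructingObjectParser<>(
+ *     "my_response",
+ *     true,
+ *     args -> new MyResponse((boolean) args[0], (boolean) args[1])
+ * );
+ * ParsedShardsAcknowledgedResponse.declareAcknowledgedAndShardsAcknowledgedFields(parser);
+ * }</pre>
+ */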
+public class ParsedShardsAcknowledgedResponse {
+
+ public static <T> void declareAcknowledgedAndShardsAcknowledgedFields(ConstructingObjectParser<T, Void> objectParser) {
+ ParsedAcknowledgedResponse.declareAcknowledgedField(objectParser);
+ objectParser.declareField(
+ constructorArg(),
+ (parser, context) -> parser.booleanValue(),
+ ShardsAcknowledgedResponse.SHARDS_ACKNOWLEDGED,
+ ObjectParser.ValueType.BOOLEAN
+ );
+ }
+}
diff --git a/src/main/java/org/elasticsearch/action/update/ParsedUpdateResponse.java b/src/main/java/org/elasticsearch/action/update/ParsedUpdateResponse.java
new file mode 100644
index 000000000..ca1272cee
--- /dev/null
+++ b/src/main/java/org/elasticsearch/action/update/ParsedUpdateResponse.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.action.update;
+
+import org.elasticsearch.action.ParsedDocWriteResponse;
+import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+
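+/**
+ * Parses an {@link UpdateResponse} from its REST representation. Illustrative use (obtaining the
+ * {@link XContentParser} is outside the scope of this class):
+ * <pre>{@code
+ * UpdateResponse updated = ParsedUpdateResponse.fromXContent(parser);
+ * }</pre>
+ */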
+public class ParsedUpdateResponse {
+
+ public static UpdateResponse fromXContent(XContentParser parser) throws IOException {
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+
+ UpdateResponse.Builder context = new UpdateResponse.Builder();
+ while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
+ parseXContentFields(parser, context);
+ }
+ return context.build();
+ }
+
+ /**
+ * Parse the current token and update the parsing context appropriately.
+ */
+ public static void parseXContentFields(XContentParser parser, UpdateResponse.Builder context) throws IOException {
+ XContentParser.Token token = parser.currentToken();
+ String currentFieldName = parser.currentName();
+
+ if (UpdateResponse.GET.equals(currentFieldName)) {
+ if (token == XContentParser.Token.START_OBJECT) {
+ context.setGetResult(GetResult.fromXContentEmbedded(parser));
+ }
+ } else {
+ ParsedDocWriteResponse.parseInnerToXContent(parser, context);
+ }
+ }
+}
diff --git a/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java b/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java
new file mode 100644
index 000000000..9f4029313
--- /dev/null
+++ b/src/main/java/org/elasticsearch/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java
@@ -0,0 +1,221 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.aggregations.pipeline;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.TreeMap;
+
+import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH;
+import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.GAP_POLICY;
+
+public class BucketSelectorPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder<BucketSelectorPipelineAggregationBuilder> {
+ public static final String NAME = "bucket_selector";
+
+ private final Map<String, String> bucketsPathsMap;
+ private final Script script;
+ private GapPolicy gapPolicy = GapPolicy.SKIP;
+
+ public BucketSelectorPipelineAggregationBuilder(String name, Map<String, String> bucketsPathsMap, Script script) {
+ super(name, NAME, new TreeMap<>(bucketsPathsMap).values().toArray(new String[bucketsPathsMap.size()]));
+ this.bucketsPathsMap = bucketsPathsMap;
+ this.script = script;
+ }
+
+ /**
+ * Read from a stream.
+ */
+ public BucketSelectorPipelineAggregationBuilder(StreamInput in) throws IOException {
+ super(in, NAME);
+ bucketsPathsMap = in.readMap(StreamInput::readString, StreamInput::readString);
+ script = new Script(in);
+ gapPolicy = GapPolicy.readFrom(in);
+ }
+
+ @Override
+ protected void doWriteTo(StreamOutput out) throws IOException {
+ out.writeMap(bucketsPathsMap, StreamOutput::writeString, StreamOutput::writeString);
+ script.writeTo(out);
+ gapPolicy.writeTo(out);
+ }
+
+ /**
+ * Sets the gap policy to use for this aggregation.
+ */
+ public BucketSelectorPipelineAggregationBuilder gapPolicy(GapPolicy gapPolicy) {
+ if (gapPolicy == null) {
+ throw new IllegalArgumentException("[gapPolicy] must not be null: [" + name + "]");
+ }
+ this.gapPolicy = gapPolicy;
+ return this;
+ }
+
+ /**
+ * Gets the gap policy to use for this aggregation.
+ */
+ public GapPolicy gapPolicy() {
+ return gapPolicy;
+ }
+
+ @Override
+ protected PipelineAggregator createInternal(Map<String, Object> metadata) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.field(BUCKETS_PATH.getPreferredName(), bucketsPathsMap);
+ builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script);
+ builder.field(GAP_POLICY.getPreferredName(), gapPolicy.getName());
+ return builder;
+ }
+
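+ /**
+ * Parses the {@code bucket_selector} body. As the branches below show, {@code buckets_path} may be
+ * a single string (bound to the script variable {@code _value}), an array of strings (bound to
+ * {@code _value0}, {@code _value1}, ...), or an object mapping variable names to paths. An
+ * illustrative body:
+ * <pre>{@code
+ * { "buckets_path": { "totalSales": "sales" }, "script": "params.totalSales > 200" }
+ * }</pre>
+ */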
+ public static BucketSelectorPipelineAggregationBuilder parse(String reducerName, XContentParser parser) throws IOException {
+ XContentParser.Token token;
+ Script script = null;
+ String currentFieldName = null;
+ Map<String, String> bucketsPathsMap = null;
+ GapPolicy gapPolicy = null;
+
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token == XContentParser.Token.VALUE_STRING) {
+ if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) {
+ bucketsPathsMap = new HashMap<>();
+ bucketsPathsMap.put("_value", parser.text());
+ } else if (GAP_POLICY.match(currentFieldName, parser.getDeprecationHandler())) {
+ gapPolicy = GapPolicy.parse(parser.text(), parser.getTokenLocation());
+ } else if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ script = Script.parse(parser);
+ } else {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."
+ );
+ }
+ } else if (token == XContentParser.Token.START_ARRAY) {
+ if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) {
+ List<String> paths = new ArrayList<>();
+ while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+ String path = parser.text();
+ paths.add(path);
+ }
+ bucketsPathsMap = new HashMap<>();
+ for (int i = 0; i < paths.size(); i++) {
+ bucketsPathsMap.put("_value" + i, paths.get(i));
+ }
+ } else {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."
+ );
+ }
+ } else if (token == XContentParser.Token.START_OBJECT) {
+ if (Script.SCRIPT_PARSE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ script = Script.parse(parser);
+ } else if (BUCKETS_PATH.match(currentFieldName, parser.getDeprecationHandler())) {
+ Map<String, Object> map = parser.map();
+ bucketsPathsMap = new HashMap<>();
+ for (Map.Entry<String, Object> entry : map.entrySet()) {
+ bucketsPathsMap.put(entry.getKey(), String.valueOf(entry.getValue()));
+ }
+ } else {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Unknown key for a " + token + " in [" + reducerName + "]: [" + currentFieldName + "]."
+ );
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + reducerName + "].");
+ }
+ }
+
+ if (bucketsPathsMap == null) {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Missing required field [" + BUCKETS_PATH.getPreferredName() + "] for bucket_selector aggregation [" + reducerName + "]"
+ );
+ }
+
+ if (script == null) {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Missing required field ["
+ + Script.SCRIPT_PARSE_FIELD.getPreferredName()
+ + "] for bucket_selector aggregation ["
+ + reducerName
+ + "]"
+ );
+ }
+
+ BucketSelectorPipelineAggregationBuilder factory = new BucketSelectorPipelineAggregationBuilder(
+ reducerName,
+ bucketsPathsMap,
+ script
+ );
+ if (gapPolicy != null) {
+ factory.gapPolicy(gapPolicy);
+ }
+ return factory;
+ }
+
+ @Override
+ protected void validate(ValidationContext context) {
+ context.validateHasParent(NAME, name);
+ }
+
+ @Override
+ protected boolean overrideBucketsPath() {
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), bucketsPathsMap, script, gapPolicy);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null || getClass() != obj.getClass()) return false;
+ if (super.equals(obj) == false) return false;
+
+ BucketSelectorPipelineAggregationBuilder other = (BucketSelectorPipelineAggregationBuilder) obj;
+ return Objects.equals(bucketsPathsMap, other.bucketsPathsMap)
+ && Objects.equals(script, other.script)
+ && Objects.equals(gapPolicy, other.gapPolicy);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.ZERO;
+ }
+}
diff --git a/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java b/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java
new file mode 100644
index 000000000..71a8ea494
--- /dev/null
+++ b/src/main/java/org/elasticsearch/aggregations/pipeline/MovFnPipelineAggregationBuilder.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.aggregations.pipeline;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
+import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.BUCKETS_PATH;
+import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT;
+import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.GAP_POLICY;
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
+public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregationBuilder<MovFnPipelineAggregationBuilder> {
+ public static final String NAME = "moving_fn";
+ private static final ParseField WINDOW = new ParseField("window");
+ private static final ParseField SHIFT = new ParseField("shift");
+
+ private final Script script;
+ private final String bucketsPathString;
+ private String format = null;
+ private GapPolicy gapPolicy = GapPolicy.SKIP;
+ private int window;
+ private int shift;
+
+ public static final ConstructingObjectParser<MovFnPipelineAggregationBuilder, String> PARSER = new ConstructingObjectParser<>(
+ NAME,
+ false,
+ (args, name) -> new MovFnPipelineAggregationBuilder(name, (String) args[0], (Script) args[1], (int) args[2])
+ );
+ static {
+ PARSER.declareString(constructorArg(), BUCKETS_PATH_FIELD);
+ PARSER.declareField(
+ constructorArg(),
+ (p, c) -> Script.parse(p),
+ Script.SCRIPT_PARSE_FIELD,
+ ObjectParser.ValueType.OBJECT_OR_STRING
+ );
+ PARSER.declareInt(constructorArg(), WINDOW);
+
+ PARSER.declareInt(MovFnPipelineAggregationBuilder::setShift, SHIFT);
+ PARSER.declareString(MovFnPipelineAggregationBuilder::format, FORMAT);
+ PARSER.declareField(MovFnPipelineAggregationBuilder::gapPolicy, p -> {
+ if (p.currentToken() == XContentParser.Token.VALUE_STRING) {
+ return GapPolicy.parse(p.text().toLowerCase(Locale.ROOT), p.getTokenLocation());
+ }
+ throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+ }, GAP_POLICY, ObjectParser.ValueType.STRING);
+ }
+
+ public MovFnPipelineAggregationBuilder(String name, String bucketsPath, Script script, int window) {
+ super(name, NAME, new String[] { bucketsPath });
+ this.bucketsPathString = bucketsPath;
+ this.script = script;
+ if (window <= 0) {
+ throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer.");
+ }
+ this.window = window;
+ }
+
+ public MovFnPipelineAggregationBuilder(StreamInput in) throws IOException {
+ super(in, NAME);
+ bucketsPathString = in.readString();
+ script = new Script(in);
+ format = in.readOptionalString();
+ gapPolicy = GapPolicy.readFrom(in);
+ window = in.readInt();
+ shift = in.readInt();
+ }
+
+ @Override
+ protected void doWriteTo(StreamOutput out) throws IOException {
+ out.writeString(bucketsPathString);
+ script.writeTo(out);
+ out.writeOptionalString(format);
+ gapPolicy.writeTo(out);
+ out.writeInt(window);
+ out.writeInt(shift);
+ }
+
+ /**
+ * Sets the format to use on the output of this aggregation.
+ */
+ public MovFnPipelineAggregationBuilder format(String format) {
+ if (Strings.isNullOrEmpty(format)) {
+ throw new IllegalArgumentException("[" + FORMAT.getPreferredName() + "] must not be null or an empty string.");
+ }
+ this.format = format;
+ return this;
+ }
+
+ /**
+ * Gets the format to use on the output of this aggregation.
+ */
+ public String format() {
+ return format;
+ }
+
+ protected DocValueFormat formatter() {
+ if (format != null) {
+ return new DocValueFormat.Decimal(format);
+ }
+ return DocValueFormat.RAW;
+ }
+
+ /**
+ * Sets the gap policy to use for this aggregation.
+ */
+ public MovFnPipelineAggregationBuilder gapPolicy(GapPolicy gapPolicy) {
+ if (gapPolicy == null) {
+ throw new IllegalArgumentException("[" + GAP_POLICY.getPreferredName() + "] must not be null.");
+ }
+ this.gapPolicy = gapPolicy;
+ return this;
+ }
+
+ /**
+ * Gets the gap policy to use for this aggregation.
+ */
+ public GapPolicy gapPolicy() {
+ return gapPolicy;
+ }
+
+ /**
+ * Returns the window size for this aggregation
+ */
+ public int getWindow() {
+ return window;
+ }
+
+ /**
+ * Sets the window size for this aggregation
+ */
+ public void setWindow(int window) {
+ if (window <= 0) {
+ throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer.");
+ }
+ this.window = window;
+ }
+
+ public void setShift(int shift) {
+ this.shift = shift;
+ }
+
+ @Override
+ protected void validate(ValidationContext context) {
+ if (window <= 0) {
+ context.addValidationError("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer.");
+ }
+ context.validateParentAggSequentiallyOrderedWithoutSkips(NAME, name);
+ }
+
+ @Override
+ protected PipelineAggregator createInternal(Map<String, Object> metadata) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.field(BUCKETS_PATH.getPreferredName(), bucketsPathString);
+ builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script);
+ if (format != null) {
+ builder.field(FORMAT.getPreferredName(), format);
+ }
+ builder.field(GAP_POLICY.getPreferredName(), gapPolicy.getName());
+ builder.field(WINDOW.getPreferredName(), window);
+ builder.field(SHIFT.getPreferredName(), shift);
+ return builder;
+ }
+
+ /**
+ * Used for serialization testing, since pipeline aggs serialize themselves as a named object but are parsed
+ * as a regular object with the name passed in.
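+ * For example (illustrative), the expected input is shaped roughly like:
+ * <pre>{@code
+ * { "my_agg": { "moving_fn": { "buckets_path": "the_sum", "window": 5, "script": "..." } } }
+ * }</pre>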
+ */
+ static MovFnPipelineAggregationBuilder parse(XContentParser parser) throws IOException {
+ parser.nextToken();
+ if (parser.currentToken().equals(XContentParser.Token.START_OBJECT)) {
+ parser.nextToken();
+ if (parser.currentToken().equals(XContentParser.Token.FIELD_NAME)) {
+ String aggName = parser.currentName();
+ parser.nextToken(); // "moving_fn"
+ parser.nextToken(); // start_object
+ return PARSER.apply(parser, aggName);
+ }
+ }
+
+ throw new IllegalStateException("Expected aggregation name but none found");
+ }
+
+ @Override
+ protected boolean overrideBucketsPath() {
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), bucketsPathString, script, format, gapPolicy, window, shift);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null || getClass() != obj.getClass()) return false;
+ if (super.equals(obj) == false) return false;
+ MovFnPipelineAggregationBuilder other = (MovFnPipelineAggregationBuilder) obj;
+ return Objects.equals(bucketsPathString, other.bucketsPathString)
+ && Objects.equals(script, other.script)
+ && Objects.equals(format, other.format)
+ && Objects.equals(gapPolicy, other.gapPolicy)
+ && Objects.equals(window, other.window)
+ && Objects.equals(shift, other.shift);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.ZERO;
+ }
+}
diff --git a/src/main/java/org/elasticsearch/index/reindex/ParsedBulkByScrollResponse.java b/src/main/java/org/elasticsearch/index/reindex/ParsedBulkByScrollResponse.java
new file mode 100644
index 000000000..6e575e004
--- /dev/null
+++ b/src/main/java/org/elasticsearch/index/reindex/ParsedBulkByScrollResponse.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.index.reindex;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
+import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.xcontent.ObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParser.Token;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+
+/**
+ * Response used for actions that index many documents using a scroll request.
+ */
+public class ParsedBulkByScrollResponse {
+
+ @SuppressWarnings("unchecked")
+ private static final ObjectParser<BulkByScrollResponseBuilder, Void> PARSER = new ObjectParser<>(
+ "bulk_by_scroll_response",
+ true,
+ BulkByScrollResponseBuilder::new
+ );
+ static {
+ PARSER.declareLong(BulkByScrollResponseBuilder::setTook, new ParseField(BulkByScrollResponse.TOOK_FIELD));
+ PARSER.declareBoolean(BulkByScrollResponseBuilder::setTimedOut, new ParseField(BulkByScrollResponse.TIMED_OUT_FIELD));
+ PARSER.declareObjectArray(
+ BulkByScrollResponseBuilder::setFailures,
+ (p, c) -> parseFailure(p),
+ new ParseField(BulkByScrollResponse.FAILURES_FIELD)
+ );
+ // the fields of BulkByScrollTask.Status are flattened into this response, so we declare them on this parser as well
+ ParsedBulkByScrollTask.ParsedStatus.declareFields(PARSER);
+ }
+
+ public static BulkByScrollResponse fromXContent(XContentParser parser) {
+ return PARSER.apply(parser, null).buildResponse();
+ }
+
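+ /**
+ * A failure entry is either a bulk {@link Failure} (signalled by a {@code cause} object) or a
+ * {@link SearchFailure} (signalled by a {@code reason} object). Illustrative shapes, inferred
+ * from the fields read below:
+ * <pre>{@code
+ * { "index": "i", "id": "1", "status": 409, "cause": { ... } }    // bulk failure
+ * { "index": "i", "shard": 0, "node": "n", "reason": { ... } }    // search failure
+ * }</pre>
+ */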
+ private static Object parseFailure(XContentParser parser) throws IOException {
+ ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser);
+ Token token;
+ String index = null;
+ String id = null;
+ Integer status = null;
+ Integer shardId = null;
+ String nodeId = null;
+ ElasticsearchException bulkExc = null;
+ ElasticsearchException searchExc = null;
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ ensureExpectedToken(Token.FIELD_NAME, token, parser);
+ String name = parser.currentName();
+ token = parser.nextToken();
+ if (token == Token.START_ARRAY) {
+ parser.skipChildren();
+ } else if (token == Token.START_OBJECT) {
+ switch (name) {
+ case SearchFailure.REASON_FIELD -> searchExc = ElasticsearchException.fromXContent(parser);
+ case Failure.CAUSE_FIELD -> bulkExc = ElasticsearchException.fromXContent(parser);
+ default -> parser.skipChildren();
+ }
+ } else if (token == Token.VALUE_STRING) {
+ switch (name) {
+ // This field is the same as SearchFailure.index
+ case Failure.INDEX_FIELD -> index = parser.text();
+ case Failure.ID_FIELD -> id = parser.text();
+ case SearchFailure.NODE_FIELD -> nodeId = parser.text();
+ }
+ } else if (token == Token.VALUE_NUMBER) {
+ switch (name) {
+ case Failure.STATUS_FIELD -> status = parser.intValue();
+ case SearchFailure.SHARD_FIELD -> shardId = parser.intValue();
+ }
+ }
+ }
+ if (bulkExc != null) {
+ return new Failure(index, id, bulkExc, RestStatus.fromCode(status));
+ } else if (searchExc != null) {
+ if (status == null) {
+ return new SearchFailure(searchExc, index, shardId, nodeId);
+ } else {
+ return new SearchFailure(searchExc, index, shardId, nodeId, RestStatus.fromCode(status));
+ }
+ } else {
+ throw new ElasticsearchParseException("failed to parse failures array. At least one of {reason,cause} must be present");
+ }
+ }
+}
diff --git a/src/main/java/org/elasticsearch/index/reindex/ParsedBulkByScrollTask.java b/src/main/java/org/elasticsearch/index/reindex/ParsedBulkByScrollTask.java
new file mode 100644
index 000000000..1f00506f3
--- /dev/null
+++ b/src/main/java/org/elasticsearch/index/reindex/ParsedBulkByScrollTask.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.index.reindex;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.core.Tuple;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.xcontent.ConstructingObjectParser;
+import org.elasticsearch.xcontent.ObjectParser;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentParseException;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParser.Token;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
+
+/**
+ * Task storing information about a currently running BulkByScroll request.
+ *
+ * When the request is not sliced, this task is the only task created, and starts an action to perform search requests.
+ *
+ * When the request is sliced, this task can either represent a coordinating task (using
+ * {@link BulkByScrollTask#setWorkerCount(int)}) or a worker task that performs search queries (using
+ * {@link BulkByScrollTask#setWorker(float, Integer)}).
+ *
+ * We don't always know if this task will be a leader or worker task when it's created, because if slices is set to "auto" it may
+ * be either depending on the number of shards in the source indices. We figure that out when the request is handled and set it on this
+ * class with {@link BulkByScrollTask#setWorkerCount(int)} or {@link BulkByScrollTask#setWorker(float, Integer)}.
+ */
+public class ParsedBulkByScrollTask {
+
+ /**
+ * Status of the reindex, update by query, or delete by query. While in
+ * general we allow {@linkplain Task.Status} implementations to make
+ * backwards incompatible changes to their {@link Task.Status#toXContent}
+ * implementations, this one has become defacto standardized because Kibana
+ * parses it. As such, we should be very careful about removing things from
+ * this.
+ */
+ public static class ParsedStatus {
+
+ static final ConstructingObjectParser<Tuple<Long, Long>, Void> RETRIES_PARSER = new ConstructingObjectParser<>(
+ "bulk_by_scroll_task_status_retries",
+ true,
+ a -> new Tuple<>(((Long) a[0]), (Long) a[1])
+ );
+ static {
+ RETRIES_PARSER.declareLong(constructorArg(), new ParseField(BulkByScrollTask.Status.RETRIES_BULK_FIELD));
+ RETRIES_PARSER.declareLong(constructorArg(), new ParseField(BulkByScrollTask.Status.RETRIES_SEARCH_FIELD));
+ }
+
+ public static void declareFields(ObjectParser<? extends BulkByScrollTask.StatusBuilder, Void> parser) {
+ parser.declareInt(BulkByScrollTask.StatusBuilder::setSliceId, new ParseField(BulkByScrollTask.Status.SLICE_ID_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setTotal, new ParseField(BulkByScrollTask.Status.TOTAL_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setUpdated, new ParseField(BulkByScrollTask.Status.UPDATED_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setCreated, new ParseField(BulkByScrollTask.Status.CREATED_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setDeleted, new ParseField(BulkByScrollTask.Status.DELETED_FIELD));
+ parser.declareInt(BulkByScrollTask.StatusBuilder::setBatches, new ParseField(BulkByScrollTask.Status.BATCHES_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setVersionConflicts, new ParseField(BulkByScrollTask.Status.VERSION_CONFLICTS_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setNoops, new ParseField(BulkByScrollTask.Status.NOOPS_FIELD));
+ parser.declareObject(BulkByScrollTask.StatusBuilder::setRetries, RETRIES_PARSER, new ParseField(BulkByScrollTask.Status.RETRIES_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setThrottled, new ParseField(BulkByScrollTask.Status.THROTTLED_RAW_FIELD));
+ parser.declareFloat(BulkByScrollTask.StatusBuilder::setRequestsPerSecond, new ParseField(BulkByScrollTask.Status.REQUESTS_PER_SEC_FIELD));
+ parser.declareString(BulkByScrollTask.StatusBuilder::setReasonCancelled, new ParseField(BulkByScrollTask.Status.CANCELED_FIELD));
+ parser.declareLong(BulkByScrollTask.StatusBuilder::setThrottledUntil, new ParseField(BulkByScrollTask.Status.THROTTLED_UNTIL_RAW_FIELD));
+ parser.declareObjectArray(
+ BulkByScrollTask.StatusBuilder::setSliceStatuses,
+ (p, c) -> ParsedStatusOrException.fromXContent(p),
+ new ParseField(BulkByScrollTask.Status.SLICES_FIELD)
+ );
+ }
+
+ public static BulkByScrollTask.Status fromXContent(XContentParser parser) throws IOException {
+ XContentParser.Token token;
+ if (parser.currentToken() == Token.START_OBJECT) {
+ // already positioned on the status object; advance to its first field
+ token = parser.nextToken();
+ } else {
+ token = parser.nextToken();
+ ensureExpectedToken(Token.START_OBJECT, token, parser);
+ token = parser.nextToken();
+ }
+ ensureExpectedToken(Token.FIELD_NAME, token, parser);
+ return innerFromXContent(parser);
+ }
+
+ public static BulkByScrollTask.Status innerFromXContent(XContentParser parser) throws IOException {
+ Token token = parser.currentToken();
+ String fieldName = parser.currentName();
+ ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
+ BulkByScrollTask.StatusBuilder builder = new BulkByScrollTask.StatusBuilder();
+ while ((token = parser.nextToken()) != Token.END_OBJECT) {
+ if (token == Token.FIELD_NAME) {
+ fieldName = parser.currentName();
+ } else if (token == Token.START_OBJECT) {
+ if (fieldName.equals(BulkByScrollTask.Status.RETRIES_FIELD)) {
+ builder.setRetries(ParsedStatus.RETRIES_PARSER.parse(parser, null));
+ } else {
+ parser.skipChildren();
+ }
+ } else if (token == Token.START_ARRAY) {
+ if (fieldName.equals(BulkByScrollTask.Status.SLICES_FIELD)) {
+ while ((token = parser.nextToken()) != Token.END_ARRAY) {
+ builder.addToSliceStatuses(ParsedStatusOrException.fromXContent(parser));
+ }
+ } else {
+ parser.skipChildren();
+ }
+ } else { // else if it is a value
+ switch (fieldName) {
+ case BulkByScrollTask.Status.SLICE_ID_FIELD -> builder.setSliceId(parser.intValue());
+ case BulkByScrollTask.Status.TOTAL_FIELD -> builder.setTotal(parser.longValue());
+ case BulkByScrollTask.Status.UPDATED_FIELD -> builder.setUpdated(parser.longValue());
+ case BulkByScrollTask.Status.CREATED_FIELD -> builder.setCreated(parser.longValue());
+ case BulkByScrollTask.Status.DELETED_FIELD -> builder.setDeleted(parser.longValue());
+ case BulkByScrollTask.Status.BATCHES_FIELD -> builder.setBatches(parser.intValue());
+ case BulkByScrollTask.Status.VERSION_CONFLICTS_FIELD -> builder.setVersionConflicts(parser.longValue());
+ case BulkByScrollTask.Status.NOOPS_FIELD -> builder.setNoops(parser.longValue());
+ case BulkByScrollTask.Status.THROTTLED_RAW_FIELD -> builder.setThrottled(parser.longValue());
+ case BulkByScrollTask.Status.REQUESTS_PER_SEC_FIELD -> builder.setRequestsPerSecond(parser.floatValue());
+ case BulkByScrollTask.Status.CANCELED_FIELD -> builder.setReasonCancelled(parser.text());
+ case BulkByScrollTask.Status.THROTTLED_UNTIL_RAW_FIELD -> builder.setThrottledUntil(parser.longValue());
+ }
+ }
+ }
+ return builder.buildStatus();
+ }
+ }
+
+ /**
+ * The status of a slice of the request. Successful requests store the
+ * {@link BulkByScrollTask.StatusOrException#status} while failing requests store a
+ * {@link BulkByScrollTask.StatusOrException#exception}.
+ */
+ public static class ParsedStatusOrException {
+
+ /**
+ * Since a {@link BulkByScrollTask.StatusOrException} can contain either an {@link Exception} or a
+ * {@link BulkByScrollTask.Status}, we need to peek at a field before deciding what to parse, since
+ * the same object could contain either. {@link BulkByScrollTask.StatusOrException#EXPECTED_EXCEPTION_FIELDS}
+ * contains the fields expected when the serialized object was an exception, and
+ * {@link BulkByScrollTask.Status#FIELDS_SET} is the set of fields expected when it was a status.
+ */
+ public static BulkByScrollTask.StatusOrException fromXContent(XContentParser parser) throws IOException {
+ XContentParser.Token token = parser.currentToken();
+ if (token == null) {
+ token = parser.nextToken();
+ }
+ if (token == Token.VALUE_NULL) {
+ return null;
+ } else {
+ ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
+ token = parser.nextToken();
+ // This loop is present only to ignore unknown tokens. It breaks as soon as we find a field
+ // that is allowed.
+ while (token != Token.END_OBJECT) {
+ ensureExpectedToken(Token.FIELD_NAME, token, parser);
+ String fieldName = parser.currentName();
+ // weird way to ignore unknown tokens
+ if (BulkByScrollTask.Status.FIELDS_SET.contains(fieldName)) {
+ return new BulkByScrollTask.StatusOrException(ParsedStatus.innerFromXContent(parser));
+ } else if (BulkByScrollTask.StatusOrException.EXPECTED_EXCEPTION_FIELDS.contains(fieldName)) {
+ return new BulkByScrollTask.StatusOrException(ElasticsearchException.innerFromXContent(parser, false));
+ } else {
+ // Ignore unknown tokens
+ token = parser.nextToken();
+ if (token == Token.START_OBJECT || token == Token.START_ARRAY) {
+ parser.skipChildren();
+ }
+ token = parser.nextToken();
+ }
+ }
+ throw new XContentParseException("Unable to parse StatusFromException. Expected fields not found.");
+ }
+ }
+ }
+}
diff --git a/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java b/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java
new file mode 100644
index 000000000..3af59349c
--- /dev/null
+++ b/src/main/java/org/elasticsearch/join/aggregations/ChildrenAggregationBuilder.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.join.aggregations;
+
+import org.apache.lucene.search.Query;
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
+import org.elasticsearch.search.aggregations.AggregatorFactory;
+import org.elasticsearch.search.aggregations.support.AggregationContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
+import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
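+/**
+ * Builder for the {@code children} aggregation, which expects a body of the form (illustrative):
+ * <pre>{@code
+ * { "children": { "type": "answer" } }
+ * }</pre>
+ */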
+public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<ChildrenAggregationBuilder> {
+
+ public static final String NAME = "children";
+ private static final ParseField TYPE_FIELD = new ParseField("type");
+
+ private final String childType;
+ private Query parentFilter;
+ private Query childFilter;
+
+ /**
+ * @param name
+ * the name of this aggregation
+ * @param childType
+ * the type of children documents
+ */
+ public ChildrenAggregationBuilder(String name, String childType) {
+ super(name);
+ if (childType == null) {
+ throw new IllegalArgumentException("[childType] must not be null: [" + name + "]");
+ }
+ this.childType = childType;
+ }
+
+ protected ChildrenAggregationBuilder(ChildrenAggregationBuilder clone, Builder factoriesBuilder, Map<String, Object> metadata) {
+ super(clone, factoriesBuilder, metadata);
+ this.childType = clone.childType;
+ this.childFilter = clone.childFilter;
+ this.parentFilter = clone.parentFilter;
+ }
+
+ @Override
+ protected ValuesSourceType defaultValueSourceType() {
+ return CoreValuesSourceType.KEYWORD;
+ }
+
+ @Override
+ protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metadata) {
+ return new ChildrenAggregationBuilder(this, factoriesBuilder, metadata);
+ }
+
+ /**
+ * Read from a stream.
+ */
+ public ChildrenAggregationBuilder(StreamInput in) throws IOException {
+ super(in);
+ childType = in.readString();
+ }
+
+ @Override
+ protected void innerWriteTo(StreamOutput out) throws IOException {
+ out.writeString(childType);
+ }
+
+ @Override
+ public BucketCardinality bucketCardinality() {
+ return BucketCardinality.ONE;
+ }
+
+ @Override
+ protected ValuesSourceAggregatorFactory innerBuild(
+ AggregationContext context,
+ ValuesSourceConfig config,
+ AggregatorFactory parent,
+ Builder subFactoriesBuilder
+ ) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected ValuesSourceConfig resolveConfig(AggregationContext context) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
+ builder.field(TYPE_FIELD.getPreferredName(), childType);
+ return builder;
+ }
+
+ public static ChildrenAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
+ String childType = null;
+
+ XContentParser.Token token;
+ String currentFieldName = null;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token == XContentParser.Token.VALUE_STRING) {
+ if ("type".equals(currentFieldName)) {
+ childType = parser.text();
+ } else {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Unknown key for a " + token + " in [" + aggregationName + "]: [" + currentFieldName + "]."
+ );
+ }
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " in [" + aggregationName + "].");
+ }
+ }
+
+ if (childType == null) {
+ throw new ParsingException(
+ parser.getTokenLocation(),
+ "Missing [child_type] field for children aggregation [" + aggregationName + "]"
+ );
+ }
+
+ return new ChildrenAggregationBuilder(aggregationName, childType);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), childType);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null || getClass() != obj.getClass()) return false;
+ if (super.equals(obj) == false) return false;
+ ChildrenAggregationBuilder other = (ChildrenAggregationBuilder) obj;
+ return Objects.equals(childType, other.childType);
+ }
+
+ @Override
+ public String getType() {
+ return NAME;
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.ZERO;
+ }
+}
diff --git a/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java
new file mode 100644
index 000000000..0f7b7c4a8
--- /dev/null
+++ b/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java
@@ -0,0 +1,338 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+package org.elasticsearch.join.query;
+
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.TransportVersions;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.index.query.AbstractQueryBuilder;
+import org.elasticsearch.index.query.InnerHitBuilder;
+import org.elasticsearch.index.query.InnerHitContextBuilder;
+import org.elasticsearch.index.query.NestedQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.QueryRewriteContext;
+import org.elasticsearch.index.query.SearchExecutionContext;
+import org.elasticsearch.xcontent.ParseField;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A query builder for {@code has_child} query.
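+ * <p>
+ * A construction sketch (illustrative; the type and field names are assumptions):
+ * <pre>{@code
+ * HasChildQueryBuilder query = new HasChildQueryBuilder(
+ *     "comment",
+ *     QueryBuilders.matchQuery("text", "elasticsearch"),
+ *     ScoreMode.Max
+ * ).minMaxChildren(1, 10).ignoreUnmapped(true);
+ * }</pre>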
+ */
+public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuilder> {
+ public static final String NAME = "has_child";
+
+ /**
+ * The default maximum number of children that are required to match for the parent to be considered a match.
+ */
+ public static final int DEFAULT_MAX_CHILDREN = Integer.MAX_VALUE;
+ /**
+ * The default minimum number of children that are required to match for the parent to be considered a match.
+ */
+ public static final int DEFAULT_MIN_CHILDREN = 1;
+ private static final ScoreMode DEFAULT_SCORE_MODE = ScoreMode.None;
+
+ /**
+ * The default value for ignore_unmapped.
+ */
+ public static final boolean DEFAULT_IGNORE_UNMAPPED = false;
+
+ private static final ParseField QUERY_FIELD = new ParseField("query");
+ private static final ParseField TYPE_FIELD = new ParseField("type");
+ private static final ParseField MAX_CHILDREN_FIELD = new ParseField("max_children");
+ private static final ParseField MIN_CHILDREN_FIELD = new ParseField("min_children");
+ private static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
+ private static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
+ private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");
+
+ private final QueryBuilder query;
+ private final String type;
+ private final ScoreMode scoreMode;
+ private InnerHitBuilder innerHitBuilder;
+ private int minChildren = DEFAULT_MIN_CHILDREN;
+ private int maxChildren = DEFAULT_MAX_CHILDREN;
+ private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
+
+ public HasChildQueryBuilder(String type, QueryBuilder query, ScoreMode scoreMode) {
+ this(type, query, DEFAULT_MIN_CHILDREN, DEFAULT_MAX_CHILDREN, scoreMode, null);
+ }
+
+ private HasChildQueryBuilder(
+ String type,
+ QueryBuilder query,
+ int minChildren,
+ int maxChildren,
+ ScoreMode scoreMode,
+ InnerHitBuilder innerHitBuilder
+ ) {
+ this.type = requireValue(type, "[" + NAME + "] requires 'type' field");
+ this.query = requireValue(query, "[" + NAME + "] requires 'query' field");
+ this.scoreMode = requireValue(scoreMode, "[" + NAME + "] requires 'score_mode' field");
+ this.innerHitBuilder = innerHitBuilder;
+ this.minChildren = minChildren;
+ this.maxChildren = maxChildren;
+ }
+
+ /**
+ * Read from a stream.
+ */
+ public HasChildQueryBuilder(StreamInput in) throws IOException {
+ super(in);
+ type = in.readString();
+ minChildren = in.readInt();
+ maxChildren = in.readInt();
+ scoreMode = ScoreMode.values()[in.readVInt()];
+ query = in.readNamedWriteable(QueryBuilder.class);
+ innerHitBuilder = in.readOptionalWriteable(InnerHitBuilder::new);
+ ignoreUnmapped = in.readBoolean();
+ }
+
+ @Override
+ protected void doWriteTo(StreamOutput out) throws IOException {
+ out.writeString(type);
+ out.writeInt(minChildren);
+ out.writeInt(maxChildren);
+ out.writeVInt(scoreMode.ordinal());
+ out.writeNamedWriteable(query);
+ out.writeOptionalWriteable(innerHitBuilder);
+ out.writeBoolean(ignoreUnmapped);
+ }
+
+ /**
+ * Defines the minimum number of children that are required to match for the parent to be considered a match and
+ * the maximum number of children that are required to match for the parent to be considered a match.
+ */
+ public HasChildQueryBuilder minMaxChildren(int min, int max) {
+ if (min <= 0) {
+ throw new IllegalArgumentException("[" + NAME + "] requires positive 'min_children' field");
+ }
+ if (max <= 0) {
+ throw new IllegalArgumentException("[" + NAME + "] requires positive 'max_children' field");
+ }
+ if (max < min) {
+ throw new IllegalArgumentException("[" + NAME + "] 'max_children' is less than 'min_children'");
+ }
+ this.minChildren = min;
+ this.maxChildren = max;
+ return this;
+ }
+
+ /**
+ * Returns inner hit definition in the scope of this query and reusing the defined type and query.
+ */
+ public InnerHitBuilder innerHit() {
+ return innerHitBuilder;
+ }
+
+ public HasChildQueryBuilder innerHit(InnerHitBuilder innerHit) {
+ this.innerHitBuilder = innerHit;
+ innerHitBuilder.setIgnoreUnmapped(ignoreUnmapped);
+ return this;
+ }
+
+ /**
+ * Returns the children query to execute.
+ */
+ public QueryBuilder query() {
+ return query;
+ }
+
+ /**
+ * Returns the child type
+ */
+ public String childType() {
+ return type;
+ }
+
+ /**
+ * Returns how the scores from the matching child documents are mapped into the parent document.
+ */
+ public ScoreMode scoreMode() {
+ return scoreMode;
+ }
+
+ /**
+ * Returns the minimum number of children that are required to match for the parent to be considered a match.
+ * The default is {@value #DEFAULT_MIN_CHILDREN}
+ */
+ public int minChildren() {
+ return minChildren;
+ }
+
+ /**
+ * Returns the maximum number of children that are required to match for the parent to be considered a match.
+ * The default is {@value #DEFAULT_MAX_CHILDREN}
+ */
+ public int maxChildren() {
+ return maxChildren;
+ }
+
+ /**
+ * Sets whether the query builder should ignore unmapped types (and run a
+ * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
+ * the type is unmapped.
+ */
+ public HasChildQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
+ this.ignoreUnmapped = ignoreUnmapped;
+ if (innerHitBuilder != null) {
+ innerHitBuilder.setIgnoreUnmapped(ignoreUnmapped);
+ }
+ return this;
+ }
+
+ /**
+ * Gets whether the query builder will ignore unmapped types (and run a
+ * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
+ * the type is unmapped.
+ */
+ public boolean ignoreUnmapped() {
+ return ignoreUnmapped;
+ }
+
+ @Override
+ protected void doXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject(NAME);
+ builder.field(QUERY_FIELD.getPreferredName());
+ query.toXContent(builder, params);
+ builder.field(TYPE_FIELD.getPreferredName(), type);
+ if (false == scoreMode.equals(DEFAULT_SCORE_MODE)) {
+ builder.field(SCORE_MODE_FIELD.getPreferredName(), NestedQueryBuilder.scoreModeAsString(scoreMode));
+ }
+ if (minChildren != DEFAULT_MIN_CHILDREN) {
+ builder.field(MIN_CHILDREN_FIELD.getPreferredName(), minChildren);
+ }
+ if (maxChildren != DEFAULT_MAX_CHILDREN) {
+ builder.field(MAX_CHILDREN_FIELD.getPreferredName(), maxChildren);
+ }
+ if (ignoreUnmapped != DEFAULT_IGNORE_UNMAPPED) {
+ builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
+ }
+ boostAndQueryNameToXContent(builder);
+ if (innerHitBuilder != null) {
+ builder.field(INNER_HITS_FIELD.getPreferredName(), innerHitBuilder, params);
+ }
+ builder.endObject();
+ }
+
+ public static HasChildQueryBuilder fromXContent(XContentParser parser) throws IOException {
+ float boost = AbstractQueryBuilder.DEFAULT_BOOST;
+ String childType = null;
+ ScoreMode scoreMode = DEFAULT_SCORE_MODE;
+ int minChildren = HasChildQueryBuilder.DEFAULT_MIN_CHILDREN;
+ int maxChildren = HasChildQueryBuilder.DEFAULT_MAX_CHILDREN;
+ boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;
+ String queryName = null;
+ InnerHitBuilder innerHitBuilder = null;
+ String currentFieldName = null;
+ XContentParser.Token token;
+ QueryBuilder iqb = null;
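+ // Pull-parser loop: FIELD_NAME tokens set the cursor, START_OBJECT tokens carry the child query or inner_hits, and value tokens carry the scalar options.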
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ if (token == XContentParser.Token.FIELD_NAME) {
+ currentFieldName = parser.currentName();
+ } else if (token == XContentParser.Token.START_OBJECT) {
+ if (QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ iqb = parseInnerQueryBuilder(parser);
+ } else if (INNER_HITS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ innerHitBuilder = InnerHitBuilder.fromXContent(parser);
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
+ }
+ } else if (token.isValue()) {
+ if (TYPE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ childType = parser.text();
+ } else if (SCORE_MODE_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ scoreMode = NestedQueryBuilder.parseScoreMode(parser.text());
+ } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ boost = parser.floatValue();
+ } else if (MIN_CHILDREN_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ minChildren = parser.intValue(true);
+ } else if (MAX_CHILDREN_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ maxChildren = parser.intValue(true);
+ } else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ ignoreUnmapped = parser.booleanValue();
+ } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
+ queryName = parser.text();
+ } else {
+ throw new ParsingException(parser.getTokenLocation(), "[has_child] query does not support [" + currentFieldName + "]");
+ }
+ }
+ }
+ HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, scoreMode);
+ hasChildQueryBuilder.minMaxChildren(minChildren, maxChildren);
+ hasChildQueryBuilder.queryName(queryName);
+ hasChildQueryBuilder.boost(boost);
+ hasChildQueryBuilder.ignoreUnmapped(ignoreUnmapped);
+ if (innerHitBuilder != null) {
+ hasChildQueryBuilder.innerHit(innerHitBuilder);
+ }
+ return hasChildQueryBuilder;
+ }
+
+ @Override
+ public String getWriteableName() {
+ return NAME;
+ }
+
+ @Override
+ protected Query doToQuery(SearchExecutionContext context) throws IOException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ protected boolean doEquals(HasChildQueryBuilder that) {
+ return Objects.equals(query, that.query)
+ && Objects.equals(type, that.type)
+ && Objects.equals(scoreMode, that.scoreMode)
+ && Objects.equals(minChildren, that.minChildren)
+ && Objects.equals(maxChildren, that.maxChildren)
+ && Objects.equals(innerHitBuilder, that.innerHitBuilder)
+ && Objects.equals(ignoreUnmapped, that.ignoreUnmapped);
+ }
+
+ @Override
+ protected int doHashCode() {
+ return Objects.hash(query, type, scoreMode, minChildren, maxChildren, innerHitBuilder, ignoreUnmapped);
+ }
+
+ @Override
+ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
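+ // Rewrite the inner query; only allocate a new has_child builder when the rewrite actually changed it.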
+ QueryBuilder rewrittenQuery = query.rewrite(queryRewriteContext);
+ if (rewrittenQuery != query) {
+ HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(
+ type,
+ rewrittenQuery,
+ minChildren,
+ maxChildren,
+ scoreMode,
+ innerHitBuilder
+ );
+ hasChildQueryBuilder.ignoreUnmapped(ignoreUnmapped);
+ return hasChildQueryBuilder;
+ }
+ return this;
+ }
+
+ @Override
+ protected void extractInnerHitBuilders(Map<String, InnerHitContextBuilder> innerHits) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public TransportVersion getMinimalSupportedVersion() {
+ return TransportVersions.ZERO;
+ }
+}
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/ActionRequestRestExecuter.java b/src/main/java/org/elasticsearch/plugin/nlpcn/ActionRequestRestExecuter.java
index ba80a10a2..f73841fd5 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/ActionRequestRestExecuter.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/ActionRequestRestExecuter.java
@@ -1,16 +1,11 @@
package org.elasticsearch.plugin.nlpcn;
import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.client.internal.Client;
import org.elasticsearch.rest.RestChannel;
-import org.elasticsearch.rest.RestStatus;
-import org.elasticsearch.rest.action.RestStatusToXContentListener;
+import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener;
import org.nlpcn.es4sql.exception.SqlParseException;
import org.nlpcn.es4sql.query.SqlElasticDeleteByQueryRequestBuilder;
import org.nlpcn.es4sql.query.SqlElasticRequestBuilder;
@@ -44,7 +39,7 @@ public void execute() throws Exception {
executeJoinRequestAndSendResponse();
}
else if (request instanceof SearchRequest) {
- client.search((SearchRequest) request, new RestStatusToXContentListener(channel));
+ client.search((SearchRequest) request, new RestRefCountedChunkedToXContentListener<>(channel));
} else if (requestBuilder instanceof SqlElasticDeleteByQueryRequestBuilder) {
throw new UnsupportedOperationException("currently not support delete on elastic 2.0.0");
}
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/ComperableHitResult.java b/src/main/java/org/elasticsearch/plugin/nlpcn/ComperableHitResult.java
index 952170fab..aa3b3da94 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/ComperableHitResult.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/ComperableHitResult.java
@@ -2,6 +2,7 @@
import com.google.common.base.Joiner;
import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.lookup.Source;
import org.nlpcn.es4sql.Util;
import java.util.ArrayList;
@@ -19,7 +20,7 @@ public class ComperableHitResult {
private Map<String, Object> flattenMap;
public ComperableHitResult(SearchHit hit , String[] fieldsOrder ,String seperator) {
this.hit = hit;
- Map hitAsMap = hit.getSourceAsMap();
+ Map<String, Object> hitAsMap = Source.fromBytes(hit.getSourceRef()).source();
this.flattenMap = new HashMap<>();
List<String> results = new ArrayList<>();
this.isAllNull = true;
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticHitsExecutor.java b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticHitsExecutor.java
index 84da8e065..75de36e3d 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticHitsExecutor.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticHitsExecutor.java
@@ -1,13 +1,6 @@
package org.elasticsearch.plugin.nlpcn;
-import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.search.SearchHits;
-import org.nlpcn.es4sql.domain.Select;
import org.nlpcn.es4sql.exception.SqlParseException;
import java.io.IOException;
@@ -16,6 +9,6 @@
* Created by Eliran on 21/8/2016.
*/
public interface ElasticHitsExecutor {
- public void run() throws IOException, SqlParseException ;
- public SearchHits getHits();
+ void run() throws IOException, SqlParseException;
+ SearchHits getHits();
}
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticJoinExecutor.java b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticJoinExecutor.java
index 343c563b4..87dc7880f 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticJoinExecutor.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticJoinExecutor.java
@@ -1,17 +1,13 @@
package org.elasticsearch.plugin.nlpcn;
-import com.google.common.collect.ImmutableMap;
+import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
-
-import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.rest.RestResponse;
+import org.elasticsearch.search.lookup.Source;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
@@ -27,7 +23,13 @@
import org.nlpcn.es4sql.query.join.TableInJoinRequestBuilder;
import java.io.IOException;
-import java.util.*;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
/**
* Created by Eliran on 15/9/2015.
@@ -48,22 +50,19 @@ protected ElasticJoinExecutor(JoinRequestBuilder requestBuilder) {
&& (secondTableReturnedField == null || secondTableReturnedField.size() == 0);
}
- public void sendResponse(RestChannel channel){
- try {
- String json = ElasticUtils.hitsAsStringResult(results,metaResults);
- BytesRestResponse bytesRestResponse = new BytesRestResponse(RestStatus.OK, json);
- channel.sendResponse(bytesRestResponse);
- } catch (IOException e) {
- e.printStackTrace();
- }
+ public void sendResponse(RestChannel channel) throws IOException {
+ XContentBuilder builder = ElasticUtils.hitsAsXContentBuilder(results, metaResults);
+ RestResponse bytesRestResponse = new RestResponse(RestStatus.OK, builder);
+ channel.sendResponse(bytesRestResponse);
}
+ @Override
public void run() throws IOException, SqlParseException {
long timeBefore = System.currentTimeMillis();
List<SearchHit> combinedSearchHits = innerRun();
int resultsSize = combinedSearchHits.size();
SearchHit[] hits = combinedSearchHits.toArray(new SearchHit[resultsSize]);
- this.results = new SearchHits(hits, resultsSize,1.0f);
+ this.results = SearchHits.unpooled(hits, new TotalHits(resultsSize, TotalHits.Relation.EQUAL_TO), 1.0f);
long joinTimeInMilli = System.currentTimeMillis() - timeBefore;
this.metaResults.setTookImMilli(joinTimeInMilli);
}
@@ -71,6 +70,7 @@ public void run() throws IOException, SqlParseException {
protected abstract List<SearchHit> innerRun() throws IOException, SqlParseException;
+ @Override
public SearchHits getHits(){
return results;
}
@@ -89,11 +89,11 @@ else if (requestBuilder instanceof NestedLoopsElasticRequestBuilder){
}
}
- protected void mergeSourceAndAddAliases(Map secondTableHitSource, SearchHit searchHit,String t1Alias,String t2Alias) {
- Map results = mapWithAliases(searchHit.getSourceAsMap(), t1Alias);
+ protected void mergeSourceAndAddAliases(Map<String, Object> secondTableHitSource, Map<String, Object> hitSource, String t1Alias, String t2Alias) {
+ Map<String, Object> results = mapWithAliases(hitSource, t1Alias);
results.putAll(mapWithAliases(secondTableHitSource, t2Alias));
- searchHit.getSourceAsMap().clear();
- searchHit.getSourceAsMap().putAll(results);
+ hitSource.clear();
+ hitSource.putAll(results);
}
protected Map<String, Object> mapWithAliases(Map<String, Object> source, String alias) {
@@ -166,16 +166,18 @@ protected void addUnmatchedResults(List combinedResults, Collection secondTableReturnedFields, int docId, String t1Alias, String t2Alias, SearchHit hit) {
String unmatchedId = hit.getId() + "|0";
- Text unamatchedType = new Text(hit.getType() + "|null");
-
- SearchHit searchHit = new SearchHit(docId, unmatchedId, unamatchedType, hit.getFields());
+ SearchHit searchHit = SearchHit.unpooled(docId, unmatchedId);
+ searchHit.addDocumentFields(hit.getDocumentFields(), Collections.emptyMap());
searchHit.sourceRef(hit.getSourceRef());
- searchHit.getSourceAsMap().clear();
- searchHit.getSourceAsMap().putAll(hit.getSourceAsMap());
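+ // Materialize the copied source bytes into a map, merge in null-filled fields for the second table, and re-serialize the merged map as this hit's source.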
+ Source source = Source.fromBytes(searchHit.getSourceRef());
+ Map<String, Object> hitSource = source.source();
+ hitSource.clear();
+ hitSource.putAll(Source.fromBytes(hit.getSourceRef()).source());
Map<String, Object> emptySecondTableHitSource = createNullsSource(secondTableReturnedFields);
- mergeSourceAndAddAliases(emptySecondTableHitSource, searchHit,t1Alias,t2Alias);
+ mergeSourceAndAddAliases(emptySecondTableHitSource, hitSource, t1Alias,t2Alias);
+ searchHit.sourceRef(Source.fromMap(hitSource, source.sourceContentType()).internalSourceRef());
return searchHit;
}
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticResultHandler.java b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticResultHandler.java
index 2e1da3104..33ea7a54f 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticResultHandler.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticResultHandler.java
@@ -1,13 +1,8 @@
package org.elasticsearch.plugin.nlpcn;
import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.aggregations.Aggregation;
-import org.elasticsearch.search.aggregations.Aggregations;
-import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
+import org.elasticsearch.search.lookup.Source;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
import java.util.Map;
/**
@@ -15,7 +10,7 @@
*/
public class ElasticResultHandler {
public static Object getFieldValue(SearchHit hit,String field){
- return deepSearchInMap(hit.getSourceAsMap(),field);
+ return deepSearchInMap(Source.fromBytes(hit.getSourceRef()).source(),field);
}
private static Object deepSearchInMap(Map<String, Object> fieldsMap, String name) {
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticUtils.java b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticUtils.java
index 9d54e2e25..5546a15c0 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticUtils.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/ElasticUtils.java
@@ -1,14 +1,15 @@
package org.elasticsearch.plugin.nlpcn;
import com.google.common.collect.ImmutableMap;
+import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.search.lookup.Source;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xcontent.XContentType;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.sort.FieldSortBuilder;
@@ -40,22 +41,23 @@ public static SearchResponse scrollOneTimeWithHits(Client client, SearchRequestB
//use our own serializer instead of the results' toXContent because the source field is different from sourceAsMap.
- public static String hitsAsStringResult(SearchHits results, MetaSearchResult metaResults) throws IOException {
+ public static XContentBuilder hitsAsXContentBuilder(SearchHits results, MetaSearchResult metaResults) throws IOException {
if(results == null) return null;
Object[] searchHits;
- searchHits = new Object[(int) results.getTotalHits()];
+ searchHits = new Object[(int) results.getTotalHits().value()];
int i = 0;
for(SearchHit hit : results) {
HashMap<String, Object> value = new HashMap<>();
value.put("_id",hit.getId());
- value.put("_type", hit.getType());
value.put("_score", hit.getScore());
- value.put("_source", hit.getSourceAsMap());
+ value.put("_source", Source.fromBytes(hit.getSourceRef()).source());
searchHits[i] = value;
i++;
}
HashMap<String, Object> hits = new HashMap<>();
- hits.put("total",results.getTotalHits());
+ TotalHits totalHits = results.getTotalHits();
+ hits.put("total", ImmutableMap.of("value", totalHits.value(),
+ "relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"));
hits.put("max_score",results.getMaxScore());
hits.put("hits",searchHits);
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint();
@@ -67,7 +69,6 @@ public static String hitsAsStringResult(SearchHits results, MetaSearchResult met
, "failed", metaResults.getFailedShards()));
builder.field("hits",hits) ;
builder.endObject();
-
- return builder.string();
+ return builder;
}
}
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/GetIndexRequestRestListener.java b/src/main/java/org/elasticsearch/plugin/nlpcn/GetIndexRequestRestListener.java
index 4d3f55236..d0cbec317 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/GetIndexRequestRestListener.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/GetIndexRequestRestListener.java
@@ -1,16 +1,12 @@
package org.elasticsearch.plugin.nlpcn;
-import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
-import org.elasticsearch.cluster.metadata.AliasMetaData;
-import org.elasticsearch.cluster.metadata.MappingMetaData;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.xcontent.ToXContent;
+import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.RestStatus;
@@ -18,6 +14,7 @@
import java.io.IOException;
import java.util.List;
+import java.util.Map;
/**
* Created by Eliran on 6/10/2015.
@@ -45,7 +42,7 @@ public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBui
writeAliases(getIndexResponse.aliases().get(index), builder, channel.request());
break;
case MAPPINGS:
- writeMappings(getIndexResponse.mappings().get(index), builder, channel.request());
+ writeMappings((Map) getIndexResponse.mappings().get(index).rawSourceAsMap(), builder, channel.request());
break;
case SETTINGS:
writeSettings(getIndexResponse.settings().get(index), builder, channel.request());
@@ -59,24 +56,24 @@ public RestResponse buildResponse(GetIndexResponse getIndexResponse, XContentBui
}
builder.endObject();
- return new BytesRestResponse(RestStatus.OK, builder);
+ return new RestResponse(RestStatus.OK, builder);
}
- private void writeAliases(List aliases, XContentBuilder builder, ToXContent.Params params) throws IOException {
+ private void writeAliases(List<AliasMetadata> aliases, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.ALIASES);
if (aliases != null) {
- for (AliasMetaData alias : aliases) {
- AliasMetaData.Builder.toXContent(alias, builder, params);
+ for (AliasMetadata alias : aliases) {
+ AliasMetadata.Builder.toXContent(alias, builder, params);
}
}
builder.endObject();
}
- private void writeMappings(ImmutableOpenMap mappings, XContentBuilder builder, ToXContent.Params params) throws IOException {
+ private void writeMappings(Map<String, Map<String, Object>> mappings, XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject(Fields.MAPPINGS);
if (mappings != null) {
- for (ObjectObjectCursor typeEntry : mappings) {
- builder.field(typeEntry.key);
- builder.map(typeEntry.value.sourceAsMap());
+ for (Map.Entry<String, Map<String, Object>> typeEntry : mappings.entrySet()) {
+ builder.field(typeEntry.getKey());
+ builder.map(typeEntry.getValue());
}
}
builder.endObject();
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinComparisonStructure.java b/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinComparisonStructure.java
index 89e562da1..98edfface 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinComparisonStructure.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinComparisonStructure.java
@@ -3,7 +3,11 @@
import org.elasticsearch.search.SearchHit;
import org.nlpcn.es4sql.domain.Field;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
/**
* Created by Eliran on 2/11/2015.
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinElasticExecutor.java b/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinElasticExecutor.java
index 34d52ee63..32b704112 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinElasticExecutor.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/HashJoinElasticExecutor.java
@@ -3,28 +3,30 @@
import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchType;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.unit.TimeValue;
-
+import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
-
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.sort.FieldSortBuilder;
-import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.search.lookup.Source;
import org.nlpcn.es4sql.domain.Field;
import org.nlpcn.es4sql.domain.Select;
import org.nlpcn.es4sql.domain.Where;
import org.nlpcn.es4sql.exception.SqlParseException;
import org.nlpcn.es4sql.query.join.HashJoinElasticRequestBuilder;
import org.nlpcn.es4sql.query.join.TableInJoinRequestBuilder;
-
import org.nlpcn.es4sql.query.maker.QueryMaker;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
/**
* Created by Eliran on 22/8/2015.
@@ -48,6 +50,7 @@ public HashJoinElasticExecutor(Client client, HashJoinElasticRequestBuilder requ
this.alreadyMatched = new HashSet<>();
}
+ @Override
public List<SearchHit> innerRun() throws IOException, SqlParseException {
Map<String, Map<String, List<Object>>> optimizationTermsFilterStructure =
@@ -135,11 +138,11 @@ private List createCombinedResults( TableInJoinRequestBuilder secondT
if (limitReached) break;
//todo: need to run on comparisons. for each comparison check if exists and add.
HashMap<String, List<Map.Entry<Field, Field>>> comparisons = this.hashJoinComparisonStructure.getComparisons();
-
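+ // Parse the second-table hit's _source once and reuse it for every comparison key below.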
+ Map<String, Object> secondTableHitSource = Source.fromBytes(secondTableHit.getSourceRef()).source();
for (Map.Entry<String, List<Map.Entry<Field, Field>>> comparison : comparisons.entrySet()) {
String comparisonID = comparison.getKey();
List<Map.Entry<Field, Field>> t1ToT2FieldsComparison = comparison.getValue();
- String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHit, false, null);
+ String key = getComparisonKey(t1ToT2FieldsComparison, secondTableHitSource, false, null);
SearchHitsResult searchHitsResult = this.hashJoinComparisonStructure.searchForMatchingSearchHits(comparisonID, key);
@@ -157,18 +160,22 @@ private List createCombinedResults( TableInJoinRequestBuilder secondT
}
Map<String, Object> copiedSource = new HashMap<>();
- copyMaps(copiedSource,secondTableHit.getSourceAsMap());
+ copyMaps(copiedSource, secondTableHitSource);
onlyReturnedFields(copiedSource, secondTableRequest.getReturnedFields(),secondTableRequest.getOriginalSelect().isSelectAll());
- SearchHit searchHit = new SearchHit(matchingHit.docId(), combinedId, new Text(matchingHit.getType() + "|" + secondTableHit.getType()), matchingHit.getFields());
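+ // Clone the matching first-table hit as an unpooled SearchHit, merge both sources under their table aliases, then re-serialize the merged map as the combined hit's _source.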
+ SearchHit searchHit = SearchHit.unpooled(matchingHit.docId(), combinedId);
+ searchHit.addDocumentFields(matchingHit.getDocumentFields(), Collections.emptyMap());
searchHit.sourceRef(matchingHit.getSourceRef());
- searchHit.getSourceAsMap().clear();
- searchHit.getSourceAsMap().putAll(matchingHit.getSourceAsMap());
+ Source source = Source.fromBytes(searchHit.getSourceRef());
+ Map<String, Object> hitSource = source.source();
+ hitSource.clear();
+ hitSource.putAll(Source.fromBytes(matchingHit.getSourceRef()).source());
String t1Alias = requestBuilder.getFirstTable().getAlias();
String t2Alias = requestBuilder.getSecondTable().getAlias();
- mergeSourceAndAddAliases(copiedSource, searchHit, t1Alias, t2Alias);
+ mergeSourceAndAddAliases(copiedSource, hitSource, t1Alias, t2Alias);
+ searchHit.sourceRef(Source.fromMap(hitSource, source.sourceContentType()).internalSourceRef());
combinedResult.add(searchHit);
resultIds++;
@@ -202,17 +209,22 @@ private void createKeyToResultsAndFillOptimizationStructure(Map>> comparisons = this.hashJoinComparisonStructure.getComparisons();
+ Map<String, Object> hitSource = Source.fromBytes(hit.getSourceRef()).source();
for (Map.Entry<String, List<Map.Entry<Field, Field>>> comparison : comparisons.entrySet()) {
String comparisonID = comparison.getKey();
List<Map.Entry<Field, Field>> t1ToT2FieldsComparison = comparison.getValue();
- String key = getComparisonKey(t1ToT2FieldsComparison, hit, true, optimizationTermsFilterStructure.get(comparisonID));
+ String key = getComparisonKey(t1ToT2FieldsComparison, hitSource, true, optimizationTermsFilterStructure.get(comparisonID));
//int docid , id
- SearchHit searchHit = new SearchHit(resultIds, hit.getId(), new Text(hit.getType()), hit.getFields());
+ SearchHit searchHit = SearchHit.unpooled(resultIds, hit.getId());
+ searchHit.addDocumentFields(hit.getDocumentFields(), Collections.emptyMap());
searchHit.sourceRef(hit.getSourceRef());
- onlyReturnedFields(searchHit.getSourceAsMap(), firstTableRequest.getReturnedFields(),firstTableRequest.getOriginalSelect().isSelectAll());
+ Source source = Source.fromBytes(searchHit.getSourceRef());
+ Map<String, Object> searchHitSource = source.source();
+ onlyReturnedFields(searchHitSource, firstTableRequest.getReturnedFields(),firstTableRequest.getOriginalSelect().isSelectAll());
+ searchHit.sourceRef(Source.fromMap(searchHitSource, source.sourceContentType()).internalSourceRef());
resultIds++;
this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit);
}
@@ -292,9 +304,8 @@ private void updateRequestWithTermsFilter(Map>>
secondTableRequest.getRequestBuilder().setQuery(boolQuery);
}
- private String getComparisonKey(List> t1ToT2FieldsComparison, SearchHit hit, boolean firstTable, Map> optimizationTermsFilterStructure) {
+ private String getComparisonKey(List<Map.Entry<Field, Field>> t1ToT2FieldsComparison, Map<String, Object> sourceAsMap, boolean firstTable, Map<String, List<Object>> optimizationTermsFilterStructure) {
String key = "";
- Map sourceAsMap = hit.getSourceAsMap();
for (Map.Entry<Field, Field> t1ToT2 : t1ToT2FieldsComparison) {
//todo: change to our function find if key contains '.'
String name;
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/IntersectExecutor.java b/src/main/java/org/elasticsearch/plugin/nlpcn/IntersectExecutor.java
new file mode 100644
index 000000000..23adb9b84
--- /dev/null
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/IntersectExecutor.java
@@ -0,0 +1,151 @@
+package org.elasticsearch.plugin.nlpcn;
+
+import com.google.common.collect.Maps;
+import org.apache.lucene.search.TotalHits;
+import org.elasticsearch.action.ActionFuture;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.lookup.Source;
+import org.nlpcn.es4sql.domain.Field;
+import org.nlpcn.es4sql.query.multi.MultiQueryRequestBuilder;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ * Intersect Executor
+ */
+public class IntersectExecutor implements ElasticHitsExecutor {
+
+ private MultiQueryRequestBuilder builder;
+ private SearchHits intersectHits;
+ private String[] fieldsOrderFirstTable;
+ private String[] fieldsOrderSecondTable;
+ private String separator;
+
+ public IntersectExecutor(MultiQueryRequestBuilder builder) {
+ this.builder = builder;
+ fillFieldsOrder();
+ separator = UUID.randomUUID().toString();
+ }
+
+ @Override
+ public void run() {
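+ // Issue both searches concurrently; actionGet() below blocks until each response arrives.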
+ ActionFuture<SearchResponse> first = this.builder.getFirstSearchRequest().execute();
+ ActionFuture<SearchResponse> second = this.builder.getSecondSearchRequest().execute();
+
+ // build a comparable row set from the first query's hits
+ SearchHit[] hits = first.actionGet().getHits().getHits();
+ Set<ComperableHitResult> firstResult = new LinkedHashSet<>();
+ fillComparableSetFromHits(this.fieldsOrderFirstTable, hits, firstResult);
+
+ // and the same for the second query
+ hits = second.actionGet().getHits().getHits();
+ Set<ComperableHitResult> secondResult = new HashSet<>();
+ fillComparableSetFromHits(this.fieldsOrderSecondTable, hits, secondResult);
+
+ // intersect: keep only rows that appear in both result sets
+ firstResult.retainAll(secondResult);
+
+ fillIntersectHitsFromResults(firstResult);
+ }
+
+ @Override
+ public SearchHits getHits() {
+ return this.intersectHits;
+ }
+
+ private void fillIntersectHitsFromResults(Set<ComperableHitResult> comparableHitResults) {
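+ // Rebuild each surviving row as an unpooled SearchHit whose _source keys are renamed to the first table's aliases.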
+ int currentId = 1;
+ List<SearchHit> intersectHitsList = new ArrayList<>(comparableHitResults.size());
+ Set<Map.Entry<String, String>> firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias().entrySet();
+ for (ComperableHitResult result : comparableHitResults) {
+ SearchHit originalHit = result.getOriginalHit();
+ SearchHit searchHit = SearchHit.unpooled(currentId, originalHit.getId());
+ searchHit.addDocumentFields(originalHit.getDocumentFields(), Collections.emptyMap());
+ searchHit.sourceRef(originalHit.getSourceRef());
+ Source source = Source.fromBytes(searchHit.getSourceRef());
+ Map<String, Object> hitSource = source.source();
+ hitSource.clear();
+ Map<String, Object> sourceAsMap = result.getFlattenMap();
+ for (Map.Entry<String, String> entry : firstTableFieldToAlias) {
+ if (sourceAsMap.containsKey(entry.getKey())) {
+ Object value = sourceAsMap.get(entry.getKey());
+ sourceAsMap.remove(entry.getKey());
+ sourceAsMap.put(entry.getValue(), value);
+ }
+ }
+
+ hitSource.putAll(sourceAsMap);
+ searchHit.sourceRef(Source.fromMap(hitSource, source.sourceContentType()).internalSourceRef());
+ currentId++;
+ intersectHitsList.add(searchHit);
+ }
+ int totalSize = currentId - 1;
+ SearchHit[] unionHitsArr = intersectHitsList.toArray(new SearchHit[totalSize]);
+ this.intersectHits = SearchHits.unpooled(unionHitsArr, new TotalHits(totalSize, TotalHits.Relation.EQUAL_TO), 1.0f);
+ }
+
+ private void fillComparableSetFromHits(String[] fieldsOrder, SearchHit[] hits, Set<ComperableHitResult> setToFill) {
+ if (Objects.isNull(hits)) {
+ return;
+ }
+
+ for (SearchHit hit : hits) {
+ ComperableHitResult comperableHitResult = new ComperableHitResult(hit, fieldsOrder, this.separator);
+ if (!comperableHitResult.isAllNull()) {
+ setToFill.add(comperableHitResult);
+ }
+ }
+ }
+
+ private void fillFieldsOrder() {
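+ // Build one sorted list of field names (or aliases) shared by both tables so flattened rows from each side compare positionally.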
+ Map<String, String> firstTableFieldToAlias = this.builder.getFirstTableFieldToAlias();
+ List<Field> firstTableFields = this.builder.getOriginalSelect(true).getFields();
+
+ List<String> fieldsOrAliases = new ArrayList<>();
+ for (Field field : firstTableFields) {
+ if (firstTableFieldToAlias.containsKey(field.getName())) {
+ fieldsOrAliases.add(field.getAlias());
+ } else {
+ fieldsOrAliases.add(field.getName());
+ }
+ }
+ Collections.sort(fieldsOrAliases);
+
+ int fieldsSize = fieldsOrAliases.size();
+
+ this.fieldsOrderFirstTable = new String[fieldsSize];
+ fillFieldsArray(fieldsOrAliases, firstTableFieldToAlias, this.fieldsOrderFirstTable);
+
+ this.fieldsOrderSecondTable = new String[fieldsSize];
+ fillFieldsArray(fieldsOrAliases, this.builder.getSecondTableFieldToAlias(), this.fieldsOrderSecondTable);
+ }
+
+ private void fillFieldsArray(List<String> fieldsOrAliases, Map<String, String> fieldsToAlias, String[] fields) {
+ Map aliasToField = inverseMap(fieldsToAlias);
+ for (int i = 0, len = fields.length; i < len; i++) {
+ String field = fieldsOrAliases.get(i);
+ if (aliasToField.containsKey(field)) {
+ field = aliasToField.get(field);
+ }
+ fields[i] = field;
+ }
+ }
+
+ private Map<String, String> inverseMap(Map<String, String> mapToInverse) {
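+ // Swap keys and values; assumes the field-to-alias mapping is one-to-one, otherwise duplicate values overwrite each other.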
+ Map inverseMap = Maps.newHashMapWithExpectedSize(mapToInverse.size());
+ for (Map.Entry<String, String> entry : mapToInverse.entrySet()) {
+ inverseMap.put(entry.getValue(), entry.getKey());
+ }
+ return inverseMap;
+ }
+}
diff --git a/src/main/java/org/elasticsearch/plugin/nlpcn/MinusExecutor.java b/src/main/java/org/elasticsearch/plugin/nlpcn/MinusExecutor.java
index 6f43a319a..e320e932b 100644
--- a/src/main/java/org/elasticsearch/plugin/nlpcn/MinusExecutor.java
+++ b/src/main/java/org/elasticsearch/plugin/nlpcn/MinusExecutor.java
@@ -1,12 +1,13 @@
package org.elasticsearch.plugin.nlpcn;
+import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.internal.Client;
import org.elasticsearch.common.document.DocumentField;
-import org.elasticsearch.common.text.Text;
-import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.lookup.Source;
import org.nlpcn.es4sql.Util;
import org.nlpcn.es4sql.domain.Condition;
import org.nlpcn.es4sql.domain.Field;
@@ -19,7 +20,14 @@
import org.nlpcn.es4sql.query.multi.MultiQueryRequestBuilder;
import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
/**
* Created by Eliran on 26/8/2016.
@@ -108,18 +116,22 @@ private void fillMinusHitsFromOneField(String fieldName, Set