diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index e5e3ba43f74f0..402b87272ae5d 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -8,7 +8,6 @@ import java.nio.file.StandardCopyOption apply plugin: 'elasticsearch.esplugin' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' -apply plugin: 'com.github.johnrengelman.shadow' archivesBaseName = 'x-pack-core' @@ -27,7 +26,6 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" - bundle project(':x-pack:protocol') compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" @@ -108,8 +106,7 @@ test { // TODO: don't publish test artifacts just to run messy tests, fix the tests! // https://github.com/elastic/x-plugins/issues/724 configurations { - testArtifacts.extendsFrom(testRuntime, shadow) - testArtifacts.exclude(group: project(':x-pack:protocol').group, module: project(':x-pack:protocol').name) + testArtifacts.extendsFrom testRuntime } task testJar(type: Jar) { appendix 'test' @@ -117,7 +114,7 @@ task testJar(type: Jar) { } artifacts { // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions - archives shadowJar + archives jar testArtifacts testJar } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java new file mode 100644 index 0000000000000..41f066daf93d3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.EnumSet; +import java.util.Locale; + +/** + * Fetch information about X-Pack from the cluster. + */ +public class XPackInfoRequest extends ActionRequest { + + public enum Category { + BUILD, LICENSE, FEATURES; + + public static EnumSet toSet(String... 
categories) { + EnumSet set = EnumSet.noneOf(Category.class); + for (String category : categories) { + switch (category) { + case "_all": + return EnumSet.allOf(Category.class); + case "_none": + return EnumSet.noneOf(Category.class); + default: + set.add(Category.valueOf(category.toUpperCase(Locale.ROOT))); + } + } + return set; + } + } + + private boolean verbose; + private EnumSet categories = EnumSet.noneOf(Category.class); + + public XPackInfoRequest() {} + + public void setVerbose(boolean verbose) { + this.verbose = verbose; + } + + public boolean isVerbose() { + return verbose; + } + + public void setCategories(EnumSet categories) { + this.categories = categories; + } + + public EnumSet getCategories() { + return categories; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.verbose = in.readBoolean(); + EnumSet categories = EnumSet.noneOf(Category.class); + int size = in.readVInt(); + for (int i = 0; i < size; i++) { + categories.add(Category.valueOf(in.readString())); + } + this.categories = categories; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(verbose); + out.writeVInt(categories.size()); + for (Category category : categories) { + out.writeString(category.name()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java new file mode 100644 index 0000000000000..2a7eddcf35395 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -0,0 +1,487 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class XPackInfoResponse extends ActionResponse implements ToXContentObject { + /** + * Value of the license's expiration time if it should never expire. 
+ */ + public static final long BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS = Long.MAX_VALUE - TimeUnit.HOURS.toMillis(24 * 365); + // TODO move this constant to License.java once we move License.java to the protocol jar + + @Nullable private BuildInfo buildInfo; + @Nullable private LicenseInfo licenseInfo; + @Nullable private FeatureSetsInfo featureSetsInfo; + + public XPackInfoResponse() {} + + public XPackInfoResponse(@Nullable BuildInfo buildInfo, @Nullable LicenseInfo licenseInfo, @Nullable FeatureSetsInfo featureSetsInfo) { + this.buildInfo = buildInfo; + this.licenseInfo = licenseInfo; + this.featureSetsInfo = featureSetsInfo; + } + + /** + * @return The build info (incl. build hash and timestamp) + */ + public BuildInfo getBuildInfo() { + return buildInfo; + } + + /** + * @return The current license info (incl. UID, type/mode. status and expiry date). May return {@code null} when no + * license is currently installed. + */ + public LicenseInfo getLicenseInfo() { + return licenseInfo; + } + + /** + * @return The current status of the feature sets in X-Pack. Feature sets describe the features available/enabled in X-Pack. + */ + public FeatureSetsInfo getFeatureSetsInfo() { + return featureSetsInfo; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(buildInfo); + out.writeOptionalWriteable(licenseInfo); + out.writeOptionalWriteable(featureSetsInfo); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.buildInfo = in.readOptionalWriteable(BuildInfo::new); + this.licenseInfo = in.readOptionalWriteable(LicenseInfo::new); + this.featureSetsInfo = in.readOptionalWriteable(FeatureSetsInfo::new); + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + XPackInfoResponse rhs = (XPackInfoResponse) other; + return Objects.equals(buildInfo, rhs.buildInfo) + && Objects.equals(licenseInfo, rhs.licenseInfo) + && Objects.equals(featureSetsInfo, rhs.featureSetsInfo); + } + + @Override + public int hashCode() { + return Objects.hash(buildInfo, licenseInfo, featureSetsInfo); + } + + @Override + public String toString() { + return Strings.toString(this, true, false); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "xpack_info_response", true, (a, v) -> { + BuildInfo buildInfo = (BuildInfo) a[0]; + LicenseInfo licenseInfo = (LicenseInfo) a[1]; + @SuppressWarnings("unchecked") // This is how constructing object parser works + List featureSets = (List) a[2]; + FeatureSetsInfo featureSetsInfo = featureSets == null ? null : new FeatureSetsInfo(new HashSet<>(featureSets)); + return new XPackInfoResponse(buildInfo, licenseInfo, featureSetsInfo); + }); + static { + PARSER.declareObject(optionalConstructorArg(), BuildInfo.PARSER, new ParseField("build")); + /* + * licenseInfo is sort of "double optional" because it is + * optional but it can also be send as `null`. 
+ */ + PARSER.declareField(optionalConstructorArg(), (p, v) -> { + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return LicenseInfo.PARSER.parse(p, v); + }, + new ParseField("license"), ValueType.OBJECT_OR_NULL); + PARSER.declareNamedObjects(optionalConstructorArg(), + (p, c, name) -> FeatureSetsInfo.FeatureSet.PARSER.parse(p, name), + new ParseField("features")); + } + public static XPackInfoResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (buildInfo != null) { + builder.field("build", buildInfo, params); + } + + EnumSet categories = XPackInfoRequest.Category + .toSet(Strings.splitStringByCommaToArray(params.param("categories", "_all"))); + if (licenseInfo != null) { + builder.field("license", licenseInfo, params); + } else if (categories.contains(XPackInfoRequest.Category.LICENSE)) { + // if the user requested the license info, and there is no license, we should send + // back an explicit null value (indicating there is no license). This is different + // than not adding the license info at all + builder.nullField("license"); + } + + if (featureSetsInfo != null) { + builder.field("features", featureSetsInfo, params); + } + + if (params.paramAsBoolean("human", true)) { + builder.field("tagline", "You know, for X"); + } + + return builder.endObject(); + } + + public static class LicenseInfo implements ToXContentObject, Writeable { + private final String uid; + private final String type; + private final String mode; + private final LicenseStatus status; + private final long expiryDate; + + public LicenseInfo(String uid, String type, String mode, LicenseStatus status, long expiryDate) { + this.uid = uid; + this.type = type; + this.mode = mode; + this.status = status; + this.expiryDate = expiryDate; + } + + public LicenseInfo(StreamInput in) throws IOException { + this(in.readString(), in.readString(), in.readString(), LicenseStatus.readFrom(in), in.readLong()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uid); + out.writeString(type); + out.writeString(mode); + status.writeTo(out); + out.writeLong(expiryDate); + } + + public String getUid() { + return uid; + } + + public String getType() { + return type; + } + + public String getMode() { + return mode; + } + + public long getExpiryDate() { + return expiryDate; + } + + public LicenseStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + LicenseInfo rhs = (LicenseInfo) other; + return Objects.equals(uid, rhs.uid) + && Objects.equals(type, rhs.type) + && Objects.equals(mode, rhs.mode) + && Objects.equals(status, rhs.status) + && expiryDate == rhs.expiryDate; + } + + @Override + public int hashCode() { + return Objects.hash(uid, type, mode, status, expiryDate); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "license_info", true, (a, v) -> { + String uid = (String) a[0]; + String type = (String) a[1]; + String mode = (String) a[2]; + LicenseStatus status = LicenseStatus.fromString((String) a[3]); + Long expiryDate = (Long) a[4]; + long primitiveExpiryDate = expiryDate == null ? 
BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS : expiryDate; + return new LicenseInfo(uid, type, mode, status, primitiveExpiryDate); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("uid")); + PARSER.declareString(constructorArg(), new ParseField("type")); + PARSER.declareString(constructorArg(), new ParseField("mode")); + PARSER.declareString(constructorArg(), new ParseField("status")); + PARSER.declareLong(optionalConstructorArg(), new ParseField("expiry_date_in_millis")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field("uid", uid) + .field("type", type) + .field("mode", mode) + .field("status", status.label()); + if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { + builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate); + } + return builder.endObject(); + } + } + + public static class BuildInfo implements ToXContentObject, Writeable { + private final String hash; + private final String timestamp; + + public BuildInfo(String hash, String timestamp) { + this.hash = hash; + this.timestamp = timestamp; + } + + public BuildInfo(StreamInput input) throws IOException { + this(input.readString(), input.readString()); + } + + @Override + public void writeTo(StreamOutput output) throws IOException { + output.writeString(hash); + output.writeString(timestamp); + } + + public String getHash() { + return hash; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + BuildInfo rhs = (BuildInfo) other; + return Objects.equals(hash, rhs.hash) + && Objects.equals(timestamp, rhs.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(hash, timestamp); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "build_info", true, (a, v) -> new BuildInfo((String) a[0], (String) a[1])); + static { + PARSER.declareString(constructorArg(), new ParseField("hash")); + PARSER.declareString(constructorArg(), new ParseField("date")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("hash", hash) + .field("date", timestamp) + .endObject(); + } + } + + public static class FeatureSetsInfo implements ToXContentObject, Writeable { + private final Map featureSets; + + public FeatureSetsInfo(Set featureSets) { + Map map = new HashMap<>(featureSets.size()); + for (FeatureSet featureSet : featureSets) { + map.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(map); + } + + public FeatureSetsInfo(StreamInput in) throws IOException { + int size = in.readVInt(); + Map featureSets = new HashMap<>(size); + for (int i = 0; i < size; i++) { + FeatureSet featureSet = new FeatureSet(in); + featureSets.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(featureSets); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(featureSets.size()); + for (FeatureSet featureSet : featureSets.values()) { + featureSet.writeTo(out); + } + } + + public Map getFeatureSets() { + return featureSets; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + 
FeatureSetsInfo rhs = (FeatureSetsInfo) other; + return Objects.equals(featureSets, rhs.featureSets); + } + + @Override + public int hashCode() { + return Objects.hash(featureSets); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + List names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList()); + for (String name : names) { + builder.field(name, featureSets.get(name), params); + } + return builder.endObject(); + } + + public static class FeatureSet implements ToXContentObject, Writeable { + private final String name; + @Nullable private final String description; + private final boolean available; + private final boolean enabled; + @Nullable private final Map nativeCodeInfo; + + public FeatureSet(String name, @Nullable String description, boolean available, boolean enabled, + @Nullable Map nativeCodeInfo) { + this.name = name; + this.description = description; + this.available = available; + this.enabled = enabled; + this.nativeCodeInfo = nativeCodeInfo; + } + + public FeatureSet(StreamInput in) throws IOException { + this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), + in.getVersion().onOrAfter(Version.V_5_4_0) ? in.readMap() : null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeOptionalString(description); + out.writeBoolean(available); + out.writeBoolean(enabled); + if (out.getVersion().onOrAfter(Version.V_5_4_0)) { + out.writeMap(nativeCodeInfo); + } + } + + public String name() { + return name; + } + + @Nullable + public String description() { + return description; + } + + public boolean available() { + return available; + } + + public boolean enabled() { + return enabled; + } + + @Nullable + public Map nativeCodeInfo() { + return nativeCodeInfo; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + FeatureSet rhs = (FeatureSet) other; + return Objects.equals(name, rhs.name) + && Objects.equals(description, rhs.description) + && available == rhs.available + && enabled == rhs.enabled + && Objects.equals(nativeCodeInfo, rhs.nativeCodeInfo); + } + + @Override + public int hashCode() { + return Objects.hash(name, description, available, enabled, nativeCodeInfo); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "feature_set", true, (a, name) -> { + String description = (String) a[0]; + boolean available = (Boolean) a[1]; + boolean enabled = (Boolean) a[2]; + @SuppressWarnings("unchecked") // Matches up with declaration below + Map nativeCodeInfo = (Map) a[3]; + return new FeatureSet(name, description, available, enabled, nativeCodeInfo); + }); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("description")); + PARSER.declareBoolean(constructorArg(), new ParseField("available")); + PARSER.declareBoolean(constructorArg(), new ParseField("enabled")); + PARSER.declareObject(optionalConstructorArg(), (p, name) -> p.map(), new ParseField("native_code_info")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (description != null) { + builder.field("description", description); + } + builder.field("available", available); + builder.field("enabled", enabled); + if (nativeCodeInfo != null) { + 
builder.field("native_code_info", nativeCodeInfo); + } + return builder.endObject(); + } + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java new file mode 100644 index 0000000000000..83621a9ac3d41 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeRequest; + +public class XPackUsageRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java new file mode 100644 index 0000000000000..ccf681837fdcd --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Response object from calling the xpack usage api. + * + * Usage information for each feature is accessible through {@link #getUsages()}. + */ +public class XPackUsageResponse { + + private final Map> usages; + + private XPackUsageResponse(Map> usages) throws IOException { + this.usages = usages; + } + + @SuppressWarnings("unchecked") + private static Map castMap(Object value) { + return (Map)value; + } + + /** Return a map from feature name to usage information for that feature. */ + public Map> getUsages() { + return usages; + } + + public static XPackUsageResponse fromXContent(XContentParser parser) throws IOException { + Map rawMap = parser.map(); + Map> usages = rawMap.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> castMap(e.getValue()))); + return new XPackUsageResponse(usages); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java new file mode 100644 index 0000000000000..3934095512120 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/common/ProtocolUtils.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.common; + +import java.util.Arrays; +import java.util.Map; + +/** + * Common utilities used for XPack protocol classes + */ +public final class ProtocolUtils { + + /** + * Implements equals for a map of string arrays + * + * The map of string arrays is used in some XPack protocol classes but does't work with equal. + */ + public static boolean equals(Map a, Map b) { + if (a == null) { + return b == null; + } + if (b == null) { + return false; + } + if (a.size() != b.size()) { + return false; + } + for (Map.Entry entry : a.entrySet()) { + String[] val = entry.getValue(); + String key = entry.getKey(); + if (val == null) { + if (b.get(key) != null || b.containsKey(key) == false) { + return false; + } + } else { + if (Arrays.equals(val, b.get(key)) == false) { + return false; + } + } + } + return true; + } + + /** + * Implements hashCode for map of string arrays + * + * The map of string arrays does't work with hashCode. + */ + public static int hashCode(Map a) { + int hash = 0; + for (Map.Entry entry : a.entrySet()) + hash += Arrays.hashCode(entry.getValue()); + return hash; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java new file mode 100644 index 0000000000000..994c7e2c2d5a3 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Connection.java @@ -0,0 +1,216 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import com.carrotsearch.hppc.ObjectIntHashMap; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +/** + * A Connection links exactly two {@link Vertex} objects. The basis of a + * connection is one or more documents have been found that contain + * this pair of terms and the strength of the connection is recorded + * as a weight. 
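+ *
+ * <p>A minimal sketch of reading connections from a {@link GraphExploreResponse}
+ * (the {@code response} variable is assumed to come from an earlier graph explore call):
+ * <pre>{@code
+ * for (Connection connection : response.getConnections()) {
+ *     Vertex from = connection.getFrom();       // source vertex of this edge
+ *     Vertex to = connection.getTo();           // target vertex of this edge
+ *     double weight = connection.getWeight();   // relative strength of the link
+ * }
+ * }</pre>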
+ */ +public class Connection { + private Vertex from; + private Vertex to; + private double weight; + private long docCount; + + public Connection(Vertex from, Vertex to, double weight, long docCount) { + this.from = from; + this.to = to; + this.weight = weight; + this.docCount = docCount; + } + + public Connection(StreamInput in, Map vertices) throws IOException { + from = vertices.get(new VertexId(in.readString(), in.readString())); + to = vertices.get(new VertexId(in.readString(), in.readString())); + weight = in.readDouble(); + docCount = in.readVLong(); + } + + Connection() { + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(from.getField()); + out.writeString(from.getTerm()); + out.writeString(to.getField()); + out.writeString(to.getTerm()); + out.writeDouble(weight); + out.writeVLong(docCount); + } + + public ConnectionId getId() { + return new ConnectionId(from.getId(), to.getId()); + } + + public Vertex getFrom() { + return from; + } + + public Vertex getTo() { + return to; + } + + /** + * @return a measure of the relative connectedness between a pair of {@link Vertex} objects + */ + public double getWeight() { + return weight; + } + + /** + * @return the number of documents in the sampled set that contained this + * pair of {@link Vertex} objects. + */ + public long getDocCount() { + return docCount; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Connection other = (Connection) obj; + return docCount == other.docCount && + weight == other.weight && + Objects.equals(to, other.to) && + Objects.equals(from, other.from); + } + + @Override + public int hashCode() { + return Objects.hash(docCount, weight, from, to); + } + + + private static final ParseField SOURCE = new ParseField("source"); + private static final ParseField TARGET = new ParseField("target"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DOC_COUNT = new ParseField("doc_count"); + + + void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap vertexNumbers) throws IOException { + builder.field(SOURCE.getPreferredName(), vertexNumbers.get(from)); + builder.field(TARGET.getPreferredName(), vertexNumbers.get(to)); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DOC_COUNT.getPreferredName(), docCount); + } + + //When deserializing from XContent we need to wait for all vertices to be loaded before + // Connection objects can be created that reference them. This class provides the interim + // state for connections. 
+ static class UnresolvedConnection { + int fromIndex; + int toIndex; + double weight; + long docCount; + UnresolvedConnection(int fromIndex, int toIndex, double weight, long docCount) { + super(); + this.fromIndex = fromIndex; + this.toIndex = toIndex; + this.weight = weight; + this.docCount = docCount; + } + public Connection resolve(List vertices) { + return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "ConnectionParser", true, + args -> { + int source = (Integer) args[0]; + int target = (Integer) args[1]; + double weight = (Double) args[2]; + long docCount = (Long) args[3]; + return new UnresolvedConnection(source, target, weight, docCount); + }); + + static { + PARSER.declareInt(constructorArg(), SOURCE); + PARSER.declareInt(constructorArg(), TARGET); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareLong(constructorArg(), DOC_COUNT); + } + static UnresolvedConnection fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + } + + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Connection} + */ + public static class ConnectionId { + private final VertexId source; + private final VertexId target; + + public ConnectionId(VertexId source, VertexId target) { + this.source = source; + this.target = target; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ConnectionId vertexId = (ConnectionId) o; + + if (source != null ? !source.equals(vertexId.source) : vertexId.source != null) + return false; + if (target != null ? !target.equals(vertexId.target) : vertexId.target != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = source != null ? source.hashCode() : 0; + result = 31 * result + (target != null ? target.hashCode() : 0); + return result; + } + + public VertexId getSource() { + return source; + } + + public VertexId getTarget() { + return target; + } + + @Override + public String toString() { + return getSource() + "->" + getTarget(); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java new file mode 100644 index 0000000000000..196982c0a35fb --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreRequest.java @@ -0,0 +1,388 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +/** + * Holds the criteria required to guide the exploration of connected terms which + * can be returned as a graph. + */ +public class GraphExploreRequest extends ActionRequest implements IndicesRequest.Replaceable, ToXContentObject { + + public static final String NO_HOPS_ERROR_MESSAGE = "Graph explore request must have at least one hop"; + public static final String NO_VERTICES_ERROR_MESSAGE = "Graph explore hop must have at least one VertexRequest"; + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); + private String[] types = Strings.EMPTY_ARRAY; + private String routing; + private TimeValue timeout; + + private int sampleSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE; + private String sampleDiversityField; + private int maxDocsPerDiversityValue; + private boolean useSignificance = true; + private boolean returnDetailedInfo; + + private List hops = new ArrayList<>(); + + public GraphExploreRequest() { + } + + /** + * Constructs a new graph request to run against the provided indices. No + * indices means it will run against all indices. + */ + public GraphExploreRequest(String... indices) { + this.indices = indices; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (hops.size() == 0) { + validationException = ValidateActions.addValidationError(NO_HOPS_ERROR_MESSAGE, validationException); + } + for (Hop hop : hops) { + validationException = hop.validate(validationException); + } + return validationException; + } + + @Override + public String[] indices() { + return this.indices; + } + + @Override + public GraphExploreRequest indices(String... indices) { + this.indices = indices; + return this; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public GraphExploreRequest indicesOptions(IndicesOptions indicesOptions) { + if (indicesOptions == null) { + throw new IllegalArgumentException("IndicesOptions must not be null"); + } + this.indicesOptions = indicesOptions; + return this; + } + + public String[] types() { + return this.types; + } + + public GraphExploreRequest types(String... 
types) { + this.types = types; + return this; + } + + public String routing() { + return this.routing; + } + + public GraphExploreRequest routing(String routing) { + this.routing = routing; + return this; + } + + public GraphExploreRequest routing(String... routings) { + this.routing = Strings.arrayToCommaDelimitedString(routings); + return this; + } + + public TimeValue timeout() { + return timeout; + } + + /** + * Graph exploration can be set to timeout after the given period. Search + * operations involved in each hop are limited to the remaining time + * available but can still overrun due to the nature of their "best efforts" + * timeout support. When a timeout occurs partial results are returned. + * + * @param timeout + * a {@link TimeValue} object which determines the maximum length + * of time to spend exploring + */ + public GraphExploreRequest timeout(TimeValue timeout) { + if (timeout == null) { + throw new IllegalArgumentException("timeout must not be null"); + } + this.timeout = timeout; + return this; + } + + public GraphExploreRequest timeout(String timeout) { + timeout(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout")); + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + types = in.readStringArray(); + routing = in.readOptionalString(); + timeout = in.readOptionalTimeValue(); + sampleSize = in.readInt(); + sampleDiversityField = in.readOptionalString(); + maxDocsPerDiversityValue = in.readInt(); + + useSignificance = in.readBoolean(); + returnDetailedInfo = in.readBoolean(); + + int numHops = in.readInt(); + Hop parentHop = null; + for (int i = 0; i < numHops; i++) { + Hop hop = new Hop(parentHop); + hop.readFrom(in); + hops.add(hop); + parentHop = hop; + } + + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + out.writeStringArray(types); + out.writeOptionalString(routing); + out.writeOptionalTimeValue(timeout); + + out.writeInt(sampleSize); + out.writeOptionalString(sampleDiversityField); + out.writeInt(maxDocsPerDiversityValue); + + out.writeBoolean(useSignificance); + out.writeBoolean(returnDetailedInfo); + out.writeInt(hops.size()); + for (Iterator iterator = hops.iterator(); iterator.hasNext();) { + Hop hop = iterator.next(); + hop.writeTo(out); + } + } + + @Override + public String toString() { + return "graph explore [" + Arrays.toString(indices) + "][" + Arrays.toString(types) + "]"; + } + + /** + * The number of top-matching documents that are considered during each hop + * (default is {@link SamplerAggregationBuilder#DEFAULT_SHARD_SAMPLE_SIZE} + * Very small values (less than 50) may not provide sufficient + * weight-of-evidence to identify significant connections between terms. + *

+ * <p>
+ * Very large values (many thousands) are not recommended with loosely
+ * defined queries (fuzzy queries or those with many OR clauses). This is
+ * because any useful signals in the best documents are diluted with
+ * irrelevant noise from low-quality matches. Performance is also typically
+ * better with smaller samples as there are fewer look-ups required for
+ * background frequencies of terms found in the documents.
+ * </p>
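+ * <p>
+ * A typical call on a {@link GraphExploreRequest} (both the variable name and
+ * the value are purely illustrative): {@code request.sampleSize(500);}
+ * </p>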
+ * + * @param maxNumberOfDocsPerHop + * shard-level sample size in documents + */ + public void sampleSize(int maxNumberOfDocsPerHop) { + sampleSize = maxNumberOfDocsPerHop; + } + + public int sampleSize() { + return sampleSize; + } + + /** + * Optional choice of single-value field on which to diversify sampled + * search results + */ + public void sampleDiversityField(String name) { + sampleDiversityField = name; + } + + public String sampleDiversityField() { + return sampleDiversityField; + } + + /** + * Optional number of permitted docs with same value in sampled search + * results. Must also declare which field using sampleDiversityField + */ + public void maxDocsPerDiversityValue(int maxDocs) { + this.maxDocsPerDiversityValue = maxDocs; + } + + public int maxDocsPerDiversityValue() { + return maxDocsPerDiversityValue; + } + + /** + * Controls the choice of algorithm used to select interesting terms. The + * default value is true which means terms are selected based on + * significance (see the {@link SignificantTerms} aggregation) rather than + * popularity (using the {@link TermsAggregator}). + * + * @param value + * true if the significant_terms algorithm should be used. + */ + public void useSignificance(boolean value) { + this.useSignificance = value; + } + + public boolean useSignificance() { + return useSignificance; + } + + /** + * Return detailed information about vertex frequencies as part of JSON + * results - defaults to false + * + * @param value + * true if detailed information is required in JSON responses + */ + public void returnDetailedInfo(boolean value) { + this.returnDetailedInfo = value; + } + + public boolean returnDetailedInfo() { + return returnDetailedInfo; + } + + /** + * Add a stage in the graph exploration. Each hop represents a stage of + * querying elasticsearch to identify terms which can then be connnected to + * other terms in a subsequent hop. 
+ * + * @param guidingQuery + * optional choice of query which influences which documents are + * considered in this stage + * @return a {@link Hop} object that holds settings for a stage in the graph + * exploration + */ + public Hop createNextHop(QueryBuilder guidingQuery) { + Hop parent = null; + if (hops.size() > 0) { + parent = hops.get(hops.size() - 1); + } + Hop newHop = new Hop(parent); + newHop.guidingQuery = guidingQuery; + hops.add(newHop); + return newHop; + } + + public int getHopNumbers() { + return hops.size(); + } + + public Hop getHop(int hopNumber) { + return hops.get(hopNumber); + } + + public static class TermBoost { + String term; + float boost; + + public TermBoost(String term, float boost) { + super(); + this.term = term; + if (boost <= 0) { + throw new IllegalArgumentException("Boosts must be a positive non-zero number"); + } + this.boost = boost; + } + + TermBoost() { + } + + public String getTerm() { + return term; + } + + public float getBoost() { + return boost; + } + + void readFrom(StreamInput in) throws IOException { + this.term = in.readString(); + this.boost = in.readFloat(); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(term); + out.writeFloat(boost); + } + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startObject("controls"); + { + if (sampleSize != SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE) { + builder.field("sample_size", sampleSize); + } + if (sampleDiversityField != null) { + builder.startObject("sample_diversity"); + builder.field("field", sampleDiversityField); + builder.field("max_docs_per_value", maxDocsPerDiversityValue); + builder.endObject(); + } + builder.field("use_significance", useSignificance); + if (returnDetailedInfo) { + builder.field("return_detailed_stats", returnDetailedInfo); + } + } + builder.endObject(); + + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.startObject("connections"); + } + hop.toXContent(builder, params); + } + for (Hop hop : hops) { + if (hop.parentHop != null) { + builder.endObject(); + } + } + builder.endObject(); + + return builder; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java new file mode 100644 index 0000000000000..12eb20617ff0f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponse.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import com.carrotsearch.hppc.ObjectIntHashMap; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.graph.Connection.ConnectionId; +import org.elasticsearch.protocol.xpack.graph.Connection.UnresolvedConnection; +import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Graph explore response holds a graph of {@link Vertex} and {@link Connection} objects + * (nodes and edges in common graph parlance). + * + * @see GraphExploreRequest + */ +public class GraphExploreResponse extends ActionResponse implements ToXContentObject { + + private long tookInMillis; + private boolean timedOut = false; + private ShardOperationFailedException[] shardFailures = ShardSearchFailure.EMPTY_ARRAY; + private Map vertices; + private Map connections; + private boolean returnDetailedInfo; + static final String RETURN_DETAILED_INFO_PARAM = "returnDetailedInfo"; + + public GraphExploreResponse() { + } + + public GraphExploreResponse(long tookInMillis, boolean timedOut, ShardOperationFailedException[] shardFailures, + Map vertices, Map connections, boolean returnDetailedInfo) { + this.tookInMillis = tookInMillis; + this.timedOut = timedOut; + this.shardFailures = shardFailures; + this.vertices = vertices; + this.connections = connections; + this.returnDetailedInfo = returnDetailedInfo; + } + + + public TimeValue getTook() { + return new TimeValue(tookInMillis); + } + + public long getTookInMillis() { + return tookInMillis; + } + + /** + * @return true if the time stated in {@link GraphExploreRequest#timeout(TimeValue)} was exceeded + * (not all hops may have been completed in this case) + */ + public boolean isTimedOut() { + return this.timedOut; + } + public ShardOperationFailedException[] getShardFailures() { + return shardFailures; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + tookInMillis = in.readVLong(); + timedOut = in.readBoolean(); + + int size = in.readVInt(); + if (size == 0) { + shardFailures = ShardSearchFailure.EMPTY_ARRAY; + } else { + shardFailures = new ShardSearchFailure[size]; + for (int i = 0; i < shardFailures.length; i++) { + shardFailures[i] = readShardSearchFailure(in); + } + } + // read vertices + size = in.readVInt(); + vertices = new HashMap<>(); + for (int i = 0; i < size; i++) { + Vertex n = Vertex.readFrom(in); + vertices.put(n.getId(), n); + } + + size = in.readVInt(); + + connections = new HashMap<>(); + for (int i = 0; i < size; i++) { + 
Connection e = new Connection(in, vertices); + connections.put(e.getId(), e); + } + + returnDetailedInfo = in.readBoolean(); + + } + + public Collection getConnections() { + return connections.values(); + } + + public Collection getVertices() { + return vertices.values(); + } + + public Vertex getVertex(VertexId id) { + return vertices.get(id); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVLong(tookInMillis); + out.writeBoolean(timedOut); + + out.writeVInt(shardFailures.length); + for (ShardOperationFailedException shardSearchFailure : shardFailures) { + shardSearchFailure.writeTo(out); + } + + out.writeVInt(vertices.size()); + for (Vertex vertex : vertices.values()) { + vertex.writeTo(out); + } + + out.writeVInt(connections.size()); + for (Connection connection : connections.values()) { + connection.writeTo(out); + } + + out.writeBoolean(returnDetailedInfo); + + } + + private static final ParseField TOOK = new ParseField("took"); + private static final ParseField TIMED_OUT = new ParseField("timed_out"); + private static final ParseField VERTICES = new ParseField("vertices"); + private static final ParseField CONNECTIONS = new ParseField("connections"); + private static final ParseField FAILURES = new ParseField("failures"); + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOOK.getPreferredName(), tookInMillis); + builder.field(TIMED_OUT.getPreferredName(), timedOut); + + builder.startArray(FAILURES.getPreferredName()); + if (shardFailures != null) { + for (ShardOperationFailedException shardFailure : shardFailures) { + builder.startObject(); + shardFailure.toXContent(builder, params); + builder.endObject(); + } + } + builder.endArray(); + + ObjectIntHashMap vertexNumbers = new ObjectIntHashMap<>(vertices.size()); + + Map extraParams = new HashMap<>(); + extraParams.put(RETURN_DETAILED_INFO_PARAM, Boolean.toString(returnDetailedInfo)); + Params extendedParams = new DelegatingMapParams(extraParams, params); + + builder.startArray(VERTICES.getPreferredName()); + for (Vertex vertex : vertices.values()) { + builder.startObject(); + vertexNumbers.put(vertex, vertexNumbers.size()); + vertex.toXContent(builder, extendedParams); + builder.endObject(); + } + builder.endArray(); + + builder.startArray(CONNECTIONS.getPreferredName()); + for (Connection connection : connections.values()) { + builder.startObject(); + connection.toXContent(builder, extendedParams, vertexNumbers); + builder.endObject(); + } + builder.endArray(); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "GraphExploreResponsenParser", true, + args -> { + GraphExploreResponse result = new GraphExploreResponse(); + result.vertices = new HashMap<>(); + result.connections = new HashMap<>(); + + result.tookInMillis = (Long) args[0]; + result.timedOut = (Boolean) args[1]; + + @SuppressWarnings("unchecked") + List vertices = (List) args[2]; + @SuppressWarnings("unchecked") + List unresolvedConnections = (List) args[3]; + @SuppressWarnings("unchecked") + List failures = (List) args[4]; + for (Vertex vertex : vertices) { + // reverse-engineer if detailed stats were requested - + // mainly here for testing framework's equality tests + result.returnDetailedInfo = result.returnDetailedInfo || vertex.getFg() > 0; + result.vertices.put(vertex.getId(), vertex); + } + for (UnresolvedConnection 
unresolvedConnection : unresolvedConnections) { + Connection resolvedConnection = unresolvedConnection.resolve(vertices); + result.connections.put(resolvedConnection.getId(), resolvedConnection); + } + if (failures.size() > 0) { + result.shardFailures = failures.toArray(new ShardSearchFailure[failures.size()]); + } + return result; + }); + + static { + PARSER.declareLong(constructorArg(), TOOK); + PARSER.declareBoolean(constructorArg(), TIMED_OUT); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> Vertex.fromXContent(p), VERTICES); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> UnresolvedConnection.fromXContent(p), CONNECTIONS); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ShardSearchFailure.fromXContent(p), FAILURES); + } + + public static GraphExploreResponse fromXContext(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java new file mode 100644 index 0000000000000..e61403e8b37a8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Hop.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * A Hop represents one of potentially many stages in a graph exploration. + * Each Hop identifies one or more fields in which it will attempt to find + * terms that are significantly connected to the previous Hop. Each field is identified + * using a {@link VertexRequest} + * + *

+ * <p>An example series of Hops on webserver logs would be:
+ * <ol>
+ * <li>an initial Hop to find the top ten IPAddresses trying to access urls containing the word "admin"</li>
+ * <li>a secondary Hop to see which other URLs those IPAddresses were trying to access</li>
+ * </ol>
+ *
+ * <p>
+ * Optionally, each hop can contain a "guiding query" that further limits the set of documents considered.
+ * In our weblog example above we might choose to constrain the second hop to only look at log records that
+ * had a response code of 404.
+ * </p>
+ * <p>
+ * If absent, the list of {@link VertexRequest}s is inherited from the prior Hop's list to avoid repeating
+ * the fields that will be examined at each stage.
+ * </p>
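+ *
+ * <p>A minimal sketch of the two-hop weblog example described above (index and
+ * field names are illustrative only):
+ * <pre>{@code
+ * GraphExploreRequest request = new GraphExploreRequest("weblogs");
+ * Hop adminHop = request.createNextHop(QueryBuilders.termQuery("url", "admin"));
+ * adminHop.addVertexRequest("ip").size(10);        // top ten IP addresses
+ * Hop notFoundHop = request.createNextHop(QueryBuilders.termQuery("response", 404));
+ * notFoundHop.addVertexRequest("url");             // which other URLs they accessed
+ * }</pre>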
+ * + */ +public class Hop implements ToXContentFragment{ + final Hop parentHop; + List vertices = null; + QueryBuilder guidingQuery = null; + + public Hop(Hop parent) { + this.parentHop = parent; + } + + public ActionRequestValidationException validate(ActionRequestValidationException validationException) { + + if (getEffectiveVertexRequests().size() == 0) { + validationException = ValidateActions.addValidationError(GraphExploreRequest.NO_VERTICES_ERROR_MESSAGE, validationException); + } + return validationException; + + } + + public Hop getParentHop() { + return parentHop; + } + + void writeTo(StreamOutput out) throws IOException { + out.writeOptionalNamedWriteable(guidingQuery); + if (vertices == null) { + out.writeVInt(0); + } else { + out.writeVInt(vertices.size()); + for (VertexRequest vr : vertices) { + vr.writeTo(out); + } + } + } + + void readFrom(StreamInput in) throws IOException { + guidingQuery = in.readOptionalNamedWriteable(QueryBuilder.class); + int size = in.readVInt(); + if (size > 0) { + vertices = new ArrayList<>(); + for (int i = 0; i < size; i++) { + VertexRequest vr = new VertexRequest(); + vr.readFrom(in); + vertices.add(vr); + } + } + } + + public QueryBuilder guidingQuery() { + if (guidingQuery != null) { + return guidingQuery; + } + return QueryBuilders.matchAllQuery(); + } + + /** + * Add a field in which this {@link Hop} will look for terms that are highly linked to + * previous hops and optionally the guiding query. + * + * @param fieldName a field in the chosen index + */ + public VertexRequest addVertexRequest(String fieldName) { + if (vertices == null) { + vertices = new ArrayList<>(); + } + VertexRequest vr = new VertexRequest(); + vr.fieldName(fieldName); + vertices.add(vr); + return vr; + } + + /** + * An optional parameter that focuses the exploration on documents that + * match the given query. + * + * @param queryBuilder any query + */ + public void guidingQuery(QueryBuilder queryBuilder) { + guidingQuery = queryBuilder; + } + + protected List getEffectiveVertexRequests() { + if (vertices != null) { + return vertices; + } + if (parentHop == null) { + return Collections.emptyList(); + } + // otherwise inherit settings from parent + return parentHop.getEffectiveVertexRequests(); + } + + public int getNumberVertexRequests() { + return getEffectiveVertexRequests().size(); + } + + public VertexRequest getVertexRequest(int requestNumber) { + return getEffectiveVertexRequests().get(requestNumber); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (guidingQuery != null) { + builder.field("query"); + guidingQuery.toXContent(builder, params); + } + if(vertices != null && vertices.size()>0) { + builder.startArray("vertices"); + for (VertexRequest vertexRequest : vertices) { + vertexRequest.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java new file mode 100644 index 0000000000000..f17812a6396a0 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java @@ -0,0 +1,255 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * A vertex in a graph response represents a single term (a field and value pair) + * which appears in one or more documents found as part of the graph exploration. + * + * A vertex term could be a bank account number, an email address, a hashtag or any + * other term that appears in documents and is interesting to represent in a network. + */ +public class Vertex implements ToXContentFragment { + + private final String field; + private final String term; + private double weight; + private final int depth; + private final long bg; + private long fg; + private static final ParseField FIELD = new ParseField("field"); + private static final ParseField TERM = new ParseField("term"); + private static final ParseField WEIGHT = new ParseField("weight"); + private static final ParseField DEPTH = new ParseField("depth"); + private static final ParseField FG = new ParseField("fg"); + private static final ParseField BG = new ParseField("bg"); + + + public Vertex(String field, String term, double weight, int depth, long bg, long fg) { + super(); + this.field = field; + this.term = term; + this.weight = weight; + this.depth = depth; + this.bg = bg; + this.fg = fg; + } + + static Vertex readFrom(StreamInput in) throws IOException { + return new Vertex(in.readString(), in.readString(), in.readDouble(), in.readVInt(), in.readVLong(), in.readVLong()); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(field); + out.writeString(term); + out.writeDouble(weight); + out.writeVInt(depth); + out.writeVLong(bg); + out.writeVLong(fg); + } + + @Override + public int hashCode() { + return Objects.hash(field, term, weight, depth, bg, fg); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Vertex other = (Vertex) obj; + return depth == other.depth && + weight == other.weight && + bg == other.bg && + fg == other.fg && + Objects.equals(field, other.field) && + Objects.equals(term, other.term); + + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + boolean returnDetailedInfo = params.paramAsBoolean(GraphExploreResponse.RETURN_DETAILED_INFO_PARAM, false); + builder.field(FIELD.getPreferredName(), field); + builder.field(TERM.getPreferredName(), term); + builder.field(WEIGHT.getPreferredName(), weight); + builder.field(DEPTH.getPreferredName(), depth); + if (returnDetailedInfo) { + builder.field(FG.getPreferredName(), fg); + builder.field(BG.getPreferredName(), bg); + } + return builder; + } + + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "VertexParser", true, + args -> { + String field = (String) args[0]; + String term = (String) args[1]; + double weight = 
(Double) args[2]; + int depth = (Integer) args[3]; + Long optionalBg = (Long) args[4]; + Long optionalFg = (Long) args[5]; + long bg = optionalBg == null ? 0 : optionalBg; + long fg = optionalFg == null ? 0 : optionalFg; + return new Vertex(field, term, weight, depth, bg, fg); + }); + + static { + PARSER.declareString(constructorArg(), FIELD); + PARSER.declareString(constructorArg(), TERM); + PARSER.declareDouble(constructorArg(), WEIGHT); + PARSER.declareInt(constructorArg(), DEPTH); + PARSER.declareLong(optionalConstructorArg(), BG); + PARSER.declareLong(optionalConstructorArg(), FG); + } + + static Vertex fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + + + /** + * @return a {@link VertexId} object that uniquely identifies this Vertex + */ + public VertexId getId() { + return createId(field, term); + } + + /** + * A convenience method for creating a {@link VertexId} + * @param field the field + * @param term the term + * @return a {@link VertexId} that can be used for looking up vertices + */ + public static VertexId createId(String field, String term) { + return new VertexId(field,term); + } + + @Override + public String toString() { + return getId().toString(); + } + + public String getField() { + return field; + } + + public String getTerm() { + return term; + } + + /** + * The weight of a vertex is an accumulation of all of the {@link Connection}s + * that are linked to this {@link Vertex} as part of a graph exploration. + * It is used internally to identify the most interesting vertices to be returned. + * @return a measure of the {@link Vertex}'s relative importance. + */ + public double getWeight() { + return weight; + } + + public void setWeight(final double weight) { + this.weight = weight; + } + + /** + * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) + * this statistic is available. + * @return the number of documents in the index that contain this term (see bg_count in + * + * the significant_terms aggregation) + */ + public long getBg() { + return bg; + } + + /** + * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) + * this statistic is available. + * Together with {@link #getBg()} these numbers are used to derive the significance of a term. + * @return the number of documents in the sample of best matching documents that contain this term (see fg_count in + * + * the significant_terms aggregation) + */ + public long getFg() { + return fg; + } + + public void setFg(final long fg) { + this.fg = fg; + } + + /** + * @return the sequence number in the series of hops where this Vertex term was first encountered + */ + public int getHopDepth() { + return depth; + } + + /** + * An identifier (implements hashcode and equals) that represents a + * unique key for a {@link Vertex} + */ + public static class VertexId { + private final String field; + private final String term; + + public VertexId(String field, String term) { + this.field = field; + this.term = term; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + VertexId vertexId = (VertexId) o; + + if (field != null ? !field.equals(vertexId.field) : vertexId.field != null) + return false; + if (term != null ? !term.equals(vertexId.term) : vertexId.term != null) + return false; + + return true; + } + + @Override + public int hashCode() { + int result = field != null ? 
field.hashCode() : 0; + result = 31 * result + (term != null ? term.hashCode() : 0); + return result; + } + + @Override + public String toString() { + return field + ":" + term; + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java new file mode 100644 index 0000000000000..63d2c616547d4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/VertexRequest.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest.TermBoost; + +import java.io.IOException; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +/** + * A request to identify terms from a choice of field as part of a {@link Hop}. + * Optionally, a set of terms can be provided that are used as an exclusion or + * inclusion list to filter which terms are considered. + * + */ +public class VertexRequest implements ToXContentObject { + private String fieldName; + private int size = DEFAULT_SIZE; + public static final int DEFAULT_SIZE = 5; + private Map includes; + private Set excludes; + public static final int DEFAULT_MIN_DOC_COUNT = 3; + private int minDocCount = DEFAULT_MIN_DOC_COUNT; + public static final int DEFAULT_SHARD_MIN_DOC_COUNT = 2; + private int shardMinDocCount = DEFAULT_SHARD_MIN_DOC_COUNT; + + + public VertexRequest() { + + } + + void readFrom(StreamInput in) throws IOException { + fieldName = in.readString(); + size = in.readVInt(); + minDocCount = in.readVInt(); + shardMinDocCount = in.readVInt(); + + int numIncludes = in.readVInt(); + if (numIncludes > 0) { + includes = new HashMap<>(); + for (int i = 0; i < numIncludes; i++) { + TermBoost tb = new TermBoost(); + tb.readFrom(in); + includes.put(tb.term, tb); + } + } + + int numExcludes = in.readVInt(); + if (numExcludes > 0) { + excludes = new HashSet<>(); + for (int i = 0; i < numExcludes; i++) { + excludes.add(in.readString()); + } + } + + } + + void writeTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeVInt(size); + out.writeVInt(minDocCount); + out.writeVInt(shardMinDocCount); + + if (includes != null) { + out.writeVInt(includes.size()); + for (TermBoost tb : includes.values()) { + tb.writeTo(out); + } + } else { + out.writeVInt(0); + } + + if (excludes != null) { + out.writeVInt(excludes.size()); + for (String term : excludes) { + out.writeString(term); + } + } else { + out.writeVInt(0); + } + } + + public String fieldName() { + return fieldName; + } + + public VertexRequest fieldName(String fieldName) { + this.fieldName = fieldName; + return this; + } + + public int size() { + return size; + } + + /** + * @param size The maximum number of terms that should be returned from this field as part of this {@link Hop} + */ + public VertexRequest size(int size) { + this.size = size; + return this; + } + + public boolean hasIncludeClauses() { + 
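As a sketch of how VertexId is intended to serve as a lookup key, the following (with invented field and term values) de-duplicates vertices that share the same field:term pair.

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.protocol.xpack.graph.Vertex;
import org.elasticsearch.protocol.xpack.graph.Vertex.VertexId;

public class VertexIdSketch {
    public static void main(String[] args) {
        // Arguments: field, term, weight, depth, bg (background count), fg (foreground count).
        Vertex first = new Vertex("account", "42", 0.5, 0, 100, 10);
        Vertex second = new Vertex("account", "42", 0.7, 1, 100, 12);

        Map<VertexId, Vertex> byId = new HashMap<>();
        byId.put(first.getId(), first);
        byId.put(second.getId(), second); // same field:term, so this replaces the first entry

        System.out.println(byId.size());                        // 1
        System.out.println(Vertex.createId("account", "42"));   // prints "account:42"
    }
}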
return includes != null && includes.size() > 0; + } + + public boolean hasExcludeClauses() { + return excludes != null && excludes.size() > 0; + } + + /** + * Adds a term that should be excluded from results + * @param term A term to be excluded + */ + public void addExclude(String term) { + if (includes != null) { + throw new IllegalArgumentException("Cannot have both include and exclude clauses"); + } + if (excludes == null) { + excludes = new HashSet<>(); + } + excludes.add(term); + } + + /** + * Adds a term to the set of allowed values - the boost defines the relative + * importance when pursuing connections in subsequent {@link Hop}s. The boost value + * appears as part of the query. + * @param term a required term + * @param boost an optional boost + */ + public void addInclude(String term, float boost) { + if (excludes != null) { + throw new IllegalArgumentException("Cannot have both include and exclude clauses"); + } + if (includes == null) { + includes = new HashMap<>(); + } + includes.put(term, new TermBoost(term, boost)); + } + + public TermBoost[] includeValues() { + return includes.values().toArray(new TermBoost[includes.size()]); + } + + public String[] includeValuesAsStringArray() { + String[] result = new String[includes.size()]; + int i = 0; + for (TermBoost tb : includes.values()) { + result[i++] = tb.term; + } + return result; + } + + public String[] excludesAsArray() { + return excludes.toArray(new String[excludes.size()]); + } + + public int minDocCount() { + return minDocCount; + } + + /** + * A "certainty" threshold which defines the weight-of-evidence required before + * a term found in this field is identified as a useful connection + * + * @param value The minimum number of documents that contain this term found in the samples used across all shards + */ + public VertexRequest minDocCount(int value) { + minDocCount = value; + return this; + } + + + public int shardMinDocCount() { + return Math.min(shardMinDocCount, minDocCount); + } + + /** + * A "certainty" threshold which defines the weight-of-evidence required before + * a term found in this field is identified as a useful connection + * + * @param value The minimum number of documents that contain this term found in the samples used across all shards + */ + public VertexRequest shardMinDocCount(int value) { + shardMinDocCount = value; + return this; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("field", fieldName); + if (size != DEFAULT_SIZE) { + builder.field("size", size); + } + if (minDocCount != DEFAULT_MIN_DOC_COUNT) { + builder.field("min_doc_count", minDocCount); + } + if (shardMinDocCount != DEFAULT_SHARD_MIN_DOC_COUNT) { + builder.field("shard_min_doc_count", shardMinDocCount); + } + if(includes!=null) { + builder.startArray("include"); + for (TermBoost tb : includes.values()) { + builder.startObject(); + builder.field("term", tb.term); + builder.field("boost", tb.boost); + builder.endObject(); + } + builder.endArray(); + } + if(excludes!=null) { + builder.startArray("exclude"); + for (String value : excludes) { + builder.value(value); + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java new file mode 100644 index 0000000000000..5d5dd0f5ef61d --- /dev/null +++ 
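A short sketch of the include/exclude contract documented above (terms invented): a vertex request accepts either an allow-list with boosts or a deny-list, never both, and only non-default settings end up in the serialized body.

import org.elasticsearch.protocol.xpack.graph.VertexRequest;

public class VertexRequestSketch {
    public static void main(String[] args) {
        VertexRequest vr = new VertexRequest()
            .fieldName("ip")      // hypothetical field
            .size(20)             // serialized only because it differs from DEFAULT_SIZE
            .minDocCount(5);
        vr.addInclude("10.0.0.1", 2.0f);
        vr.addInclude("10.0.0.2", 1.0f);

        try {
            vr.addExclude("10.0.0.3"); // not allowed once includes are present
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // "Cannot have both include and exclude clauses"
        }
        System.out.println(vr.hasIncludeClauses()); // true
    }
}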
b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Graph + * APIs. + */ +package org.elasticsearch.protocol.xpack.graph; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java new file mode 100644 index 0000000000000..62353b093b5b5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/DeleteLicenseRequest.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + + +public class DeleteLicenseRequest extends AcknowledgedRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java new file mode 100644 index 0000000000000..971e181ee13d4 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; + + +public class GetLicenseRequest extends MasterNodeReadRequest { + + public GetLicenseRequest() { + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java new file mode 100644 index 0000000000000..6d5e1b5653fe7 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseResponse.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionResponse; + +public class GetLicenseResponse extends ActionResponse { + + private String license; + + GetLicenseResponse() { + } + + public GetLicenseResponse(String license) { + this.license = license; + } + + public String getLicenseDefinition() { + return license; + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java new file mode 100644 index 0000000000000..5bc66ab745e49 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.io.IOException; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +/** + * Status of an X-Pack license. + */ +public enum LicenseStatus implements Writeable { + + ACTIVE("active"), + INVALID("invalid"), + EXPIRED("expired"); + + private final String label; + + LicenseStatus(String label) { + this.label = label; + } + + public String label() { + return label; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(label); + } + + public static LicenseStatus readFrom(StreamInput in) throws IOException { + return fromString(in.readString()); + } + + public static LicenseStatus fromString(String value) { + switch (value) { + case "active": + return ACTIVE; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown license status [" + value + "]"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java new file mode 100644 index 0000000000000..18745653e761e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/LicensesStatus.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
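Purely for illustration, the string round trip LicenseStatus provides; the unknown value below is made up to show the failure mode.

import org.elasticsearch.protocol.xpack.license.LicenseStatus;

public class LicenseStatusSketch {
    public static void main(String[] args) {
        LicenseStatus status = LicenseStatus.fromString("active");
        System.out.println(status.label()); // "active"

        try {
            LicenseStatus.fromString("revoked"); // not a known status
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // unknown license status [revoked]
        }
    }
}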
+ */ +package org.elasticsearch.protocol.xpack.license; + +import java.util.Locale; + +public enum LicensesStatus { + VALID((byte) 0), + INVALID((byte) 1), + EXPIRED((byte) 2); + + private final byte id; + + LicensesStatus(byte id) { + this.id = id; + } + + public int id() { + return id; + } + + public static LicensesStatus fromId(int id) { + if (id == 0) { + return VALID; + } else if (id == 1) { + return INVALID; + } else if (id == 2) { + return EXPIRED; + } else { + throw new IllegalStateException("no valid LicensesStatus for id=" + id); + } + } + + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + public static LicensesStatus fromString(String value) { + switch (value) { + case "valid": + return VALID; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown licenses status [" + value + "]"); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java new file mode 100644 index 0000000000000..342e6c296e7ed --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseRequest.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; + +public class PutLicenseRequest extends AcknowledgedRequest { + + private String licenseDefinition; + private boolean acknowledge = false; + + public PutLicenseRequest() { + + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public void setLicenseDefinition(String licenseDefinition) { + this.licenseDefinition = licenseDefinition; + } + + public String getLicenseDefinition() { + return licenseDefinition; + } + + public void setAcknowledge(boolean acknowledge) { + this.acknowledge = acknowledge; + } + + public boolean isAcknowledge() { + return acknowledge; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java new file mode 100644 index 0000000000000..206c5a3b38366 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponse.java @@ -0,0 +1,195 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
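A hedged sketch of filling in PutLicenseRequest on the client side; the license body is a placeholder rather than a real signed license.

import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;

public class PutLicenseRequestSketch {
    public static void main(String[] args) {
        PutLicenseRequest request = new PutLicenseRequest();
        // Placeholder body: a real request carries the signed license JSON under a "license" object.
        request.setLicenseDefinition("{\"license\": {}}");
        request.setAcknowledge(true); // accept feature downgrades without a second round trip

        System.out.println(request.validate());      // null: this request does no client-side validation
        System.out.println(request.isAcknowledge()); // true
    }
}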
+ */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.common.ProtocolUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class PutLicenseResponse extends AcknowledgedResponse { + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "put_license_response", true, (a, v) -> { + boolean acknowledged = (Boolean) a[0]; + LicensesStatus licensesStatus = LicensesStatus.fromString((String) a[1]); + @SuppressWarnings("unchecked") Tuple> acknowledgements = (Tuple>) a[2]; + if (acknowledgements == null) { + return new PutLicenseResponse(acknowledged, licensesStatus); + } else { + return new PutLicenseResponse(acknowledged, licensesStatus, acknowledgements.v1(), acknowledgements.v2()); + } + + }); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField("acknowledged")); + PARSER.declareString(constructorArg(), new ParseField("license_status")); + PARSER.declareObject(optionalConstructorArg(), (parser, v) -> { + Map acknowledgeMessages = new HashMap<>(); + String message = null; + XContentParser.Token token; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else { + if (currentFieldName == null) { + throw new XContentParseException(parser.getTokenLocation(), "expected message header or acknowledgement"); + } + if ("message".equals(currentFieldName)) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new XContentParseException(parser.getTokenLocation(), "unexpected message header type"); + } + message = parser.text(); + } else { + if (token != XContentParser.Token.START_ARRAY) { + throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement type"); + } + List acknowledgeMessagesList = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.VALUE_STRING) { + throw new XContentParseException(parser.getTokenLocation(), "unexpected acknowledgement text"); + } + acknowledgeMessagesList.add(parser.text()); + } + acknowledgeMessages.put(currentFieldName, acknowledgeMessagesList.toArray(new String[0])); + } + } + } + return new Tuple<>(message, acknowledgeMessages); + }, + new ParseField("acknowledge")); + } + + private LicensesStatus status; + private Map acknowledgeMessages; + private String acknowledgeHeader; + + public PutLicenseResponse() { + } + + public PutLicenseResponse(boolean acknowledged, LicensesStatus status) { + this(acknowledged, status, 
null, Collections.emptyMap()); + } + + public PutLicenseResponse(boolean acknowledged, LicensesStatus status, String acknowledgeHeader, + Map acknowledgeMessages) { + super(acknowledged); + this.status = status; + this.acknowledgeHeader = acknowledgeHeader; + this.acknowledgeMessages = acknowledgeMessages; + } + + public LicensesStatus status() { + return status; + } + + public Map acknowledgeMessages() { + return acknowledgeMessages; + } + + public String acknowledgeHeader() { + return acknowledgeHeader; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + status = LicensesStatus.fromId(in.readVInt()); + acknowledgeHeader = in.readOptionalString(); + int size = in.readVInt(); + Map acknowledgeMessages = new HashMap<>(size); + for (int i = 0; i < size; i++) { + String feature = in.readString(); + int nMessages = in.readVInt(); + String[] messages = new String[nMessages]; + for (int j = 0; j < nMessages; j++) { + messages[j] = in.readString(); + } + acknowledgeMessages.put(feature, messages); + } + this.acknowledgeMessages = acknowledgeMessages; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeVInt(status.id()); + out.writeOptionalString(acknowledgeHeader); + out.writeVInt(acknowledgeMessages.size()); + for (Map.Entry entry : acknowledgeMessages.entrySet()) { + out.writeString(entry.getKey()); + out.writeVInt(entry.getValue().length); + for (String message : entry.getValue()) { + out.writeString(message); + } + } + } + + @Override + protected void addCustomFields(XContentBuilder builder, Params params) throws IOException { + builder.field("license_status", status.toString()); + if (!acknowledgeMessages.isEmpty()) { + builder.startObject("acknowledge"); + builder.field("message", acknowledgeHeader); + for (Map.Entry entry : acknowledgeMessages.entrySet()) { + builder.startArray(entry.getKey()); + for (String message : entry.getValue()) { + builder.value(message); + } + builder.endArray(); + } + builder.endObject(); + } + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + public static PutLicenseResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (!super.equals(o)) return false; + PutLicenseResponse that = (PutLicenseResponse) o; + + return status == that.status && + ProtocolUtils.equals(acknowledgeMessages, that.acknowledgeMessages) && + Objects.equals(acknowledgeHeader, that.acknowledgeHeader); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), status, ProtocolUtils.hashCode(acknowledgeMessages), acknowledgeHeader); + } + + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java new file mode 100644 index 0000000000000..a0a80a9958b95 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
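A sketch of feeding PutLicenseResponse.fromXContent directly from a JSON string, assuming the standard XContent createParser overload that takes a NamedXContentRegistry and DeprecationHandler; the response body shown is invented but uses the fields the parser declares.

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.protocol.xpack.license.PutLicenseResponse;

public class PutLicenseResponseParsingSketch {
    public static void main(String[] args) throws Exception {
        String json = "{"
            + "\"acknowledged\": false,"
            + "\"license_status\": \"valid\","
            + "\"acknowledge\": {"
            + "  \"message\": \"this license update requires acknowledgement\","
            + "  \"watcher\": [\"watches will be disabled\"]"
            + "}}";
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            PutLicenseResponse response = PutLicenseResponse.fromXContent(parser);
            System.out.println(response.status());                          // valid
            System.out.println(response.acknowledgeHeader());               // the "message" header
            System.out.println(response.acknowledgeMessages().get("watcher")[0]);
        }
    }
}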
+ */ + +/** + * Request and Response objects for the default distribution's License + * APIs. + */ +package org.elasticsearch.protocol.xpack.license; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java new file mode 100644 index 0000000000000..3ae952fb618e2 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequest.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.migration; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +public class IndexUpgradeInfoRequest extends MasterNodeReadRequest implements IndicesRequest.Replaceable { + + private String[] indices = Strings.EMPTY_ARRAY; + private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true); + + public IndexUpgradeInfoRequest(String... indices) { + indices(indices); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndexUpgradeInfoRequest indices(String... indices) { + this.indices = Objects.requireNonNull(indices, "indices cannot be null"); + return this; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + public void indicesOptions(IndicesOptions indicesOptions) { + this.indicesOptions = indicesOptions; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IndexUpgradeInfoRequest request = (IndexUpgradeInfoRequest) o; + return Arrays.equals(indices, request.indices) && + Objects.equals(indicesOptions.toString(), request.indicesOptions.toString()); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(indices), indicesOptions.toString()); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java new file mode 100644 index 0000000000000..17115ac9b1711 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponse.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.migration; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class IndexUpgradeInfoResponse extends ActionResponse implements ToXContentObject { + + private static final ParseField INDICES = new ParseField("indices"); + private static final ParseField ACTION_REQUIRED = new ParseField("action_required"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("IndexUpgradeInfoResponse", + true, + (a, c) -> { + @SuppressWarnings("unchecked") + Map map = (Map)a[0]; + Map actionsRequired = map.entrySet().stream() + .filter(e -> { + if (e.getValue() instanceof Map == false) { + return false; + } + @SuppressWarnings("unchecked") + Map value =(Map)e.getValue(); + return value.containsKey(ACTION_REQUIRED.getPreferredName()); + }) + .collect(Collectors.toMap( + Map.Entry::getKey, + e -> { + @SuppressWarnings("unchecked") + Map value = (Map) e.getValue(); + return UpgradeActionRequired.fromString((String)value.get(ACTION_REQUIRED.getPreferredName())); + } + )); + return new IndexUpgradeInfoResponse(actionsRequired); + }); + + static { + PARSER.declareObject(constructorArg(), (p, c) -> p.map(), INDICES); + } + + + private Map actions; + + public IndexUpgradeInfoResponse() { + + } + + public IndexUpgradeInfoResponse(Map actions) { + this.actions = actions; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + actions = in.readMap(StreamInput::readString, UpgradeActionRequired::readFromStream); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(actions, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } + + public Map getActions() { + return actions; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.startObject(INDICES.getPreferredName()); + for (Map.Entry entry : actions.entrySet()) { + builder.startObject(entry.getKey()); + { + builder.field(ACTION_REQUIRED.getPreferredName(), entry.getValue().toString()); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + IndexUpgradeInfoResponse response = (IndexUpgradeInfoResponse) o; + return Objects.equals(actions, response.actions); + } + + @Override + public int hashCode() { + return Objects.hash(actions); + } + + public static IndexUpgradeInfoResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git 
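A sketch of pairing the migration request and response types (index names invented); the response here is built by hand the way the transport layer might hand it back.

import java.util.Collections;
import java.util.Map;

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;

public class MigrationSketch {
    public static void main(String[] args) {
        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest("logs-*", ".watches"); // hypothetical indices
        request.indicesOptions(IndicesOptions.lenientExpandOpen());
        System.out.println(request.validate()); // null: no client-side validation

        IndexUpgradeInfoResponse response =
            new IndexUpgradeInfoResponse(Collections.singletonMap(".watches", UpgradeActionRequired.REINDEX));
        for (Map.Entry<String, UpgradeActionRequired> entry : response.getActions().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue()); // .watches -> reindex
        }
    }
}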
a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java new file mode 100644 index 0000000000000..dce1c7d18f50e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/UpgradeActionRequired.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.migration; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Locale; + +/** + * Indicates the type of the upgrade required for the index + */ +public enum UpgradeActionRequired implements Writeable { + NOT_APPLICABLE, // Indicates that the check is not applicable to this index type, the next check will be performed + UP_TO_DATE, // Indicates that the check finds this index to be up to date - no additional checks are required + REINDEX, // The index should be reindex + UPGRADE; // The index should go through the upgrade procedure + + public static UpgradeActionRequired fromString(String value) { + return UpgradeActionRequired.valueOf(value.toUpperCase(Locale.ROOT)); + } + + public static UpgradeActionRequired readFromStream(StreamInput in) throws IOException { + return in.readEnum(UpgradeActionRequired.class); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeEnum(this); + } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java new file mode 100644 index 0000000000000..7c52f6a8fd4f1 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/migration/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Migration + * APIs. + */ +package org.elasticsearch.protocol.xpack.migration; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java new file mode 100644 index 0000000000000..3ed877d08cccd --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/package-info.java @@ -0,0 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for miscellaneous X-Pack APIs. 
+ */ +package org.elasticsearch.protocol.xpack; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java new file mode 100644 index 0000000000000..e5b116a3a7a98 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java @@ -0,0 +1,246 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.security; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; + +/** + * An authenticated user + */ +public class User implements ToXContentObject { + + private final String username; + private final String[] roles; + private final User authenticatedUser; + private final Map metadata; + private final boolean enabled; + + @Nullable private final String fullName; + @Nullable private final String email; + + public User(String username, String... roles) { + this(username, roles, null, null, null, true); + } + + public User(String username, String[] roles, User authenticatedUser) { + this(username, roles, null, null, null, true, authenticatedUser); + } + + public User(User user, User authenticatedUser) { + this(user.principal(), user.roles(), user.fullName(), user.email(), user.metadata(), user.enabled(), authenticatedUser); + } + + public User(String username, String[] roles, String fullName, String email, Map metadata, boolean enabled) { + this(username, roles, fullName, email, metadata, enabled, null); + } + + private User(String username, String[] roles, String fullName, String email, Map metadata, boolean enabled, + User authenticatedUser) { + this.username = username; + this.roles = roles == null ? Strings.EMPTY_ARRAY : roles; + this.metadata = metadata != null ? Collections.unmodifiableMap(metadata) : Collections.emptyMap(); + this.fullName = fullName; + this.email = email; + this.enabled = enabled; + assert (authenticatedUser == null || authenticatedUser.isRunAs() == false) : "the authenticated user should not be a run_as user"; + this.authenticatedUser = authenticatedUser; + } + + /** + * @return The principal of this user - effectively serving as the + * unique identity of of the user. + */ + public String principal() { + return this.username; + } + + /** + * @return The roles this user is associated with. The roles are + * identified by their unique names and each represents as + * set of permissions + */ + public String[] roles() { + return this.roles; + } + + /** + * @return The metadata that is associated with this user. Can never be {@code null}. + */ + public Map metadata() { + return metadata; + } + + /** + * @return The full name of this user. May be {@code null}. + */ + public String fullName() { + return fullName; + } + + /** + * @return The email of this user. May be {@code null}. 
+ */ + public String email() { + return email; + } + + /** + * @return whether the user is enabled or not + */ + public boolean enabled() { + return enabled; + } + + /** + * @return The user that was originally authenticated. + * This may be the user itself, or a different user which used runAs. + */ + public User authenticatedUser() { + return authenticatedUser == null ? this : authenticatedUser; + } + + /** Return true if this user was not the originally authenticated user, false otherwise. */ + public boolean isRunAs() { + return authenticatedUser != null; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("User[username=").append(username); + sb.append(",roles=[").append(Strings.arrayToCommaDelimitedString(roles)).append("]"); + sb.append(",fullName=").append(fullName); + sb.append(",email=").append(email); + sb.append(",metadata="); + sb.append(metadata); + if (authenticatedUser != null) { + sb.append(",authenticatedUser=[").append(authenticatedUser.toString()).append("]"); + } + sb.append("]"); + return sb.toString(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o instanceof User == false) return false; + + User user = (User) o; + + if (!username.equals(user.username)) return false; + // Probably incorrect - comparing Object[] arrays with Arrays.equals + if (!Arrays.equals(roles, user.roles)) return false; + if (authenticatedUser != null ? !authenticatedUser.equals(user.authenticatedUser) : user.authenticatedUser != null) return false; + if (!metadata.equals(user.metadata)) return false; + if (fullName != null ? !fullName.equals(user.fullName) : user.fullName != null) return false; + return !(email != null ? !email.equals(user.email) : user.email != null); + + } + + @Override + public int hashCode() { + int result = username.hashCode(); + result = 31 * result + Arrays.hashCode(roles); + result = 31 * result + (authenticatedUser != null ? authenticatedUser.hashCode() : 0); + result = 31 * result + metadata.hashCode(); + result = 31 * result + (fullName != null ? fullName.hashCode() : 0); + result = 31 * result + (email != null ? 
email.hashCode() : 0); + return result; + } + + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.USERNAME.getPreferredName(), principal()); + builder.array(Fields.ROLES.getPreferredName(), roles()); + builder.field(Fields.FULL_NAME.getPreferredName(), fullName()); + builder.field(Fields.EMAIL.getPreferredName(), email()); + builder.field(Fields.METADATA.getPreferredName(), metadata()); + builder.field(Fields.ENABLED.getPreferredName(), enabled()); + return builder.endObject(); + } + + public static User partialReadFrom(String username, StreamInput input) throws IOException { + String[] roles = input.readStringArray(); + Map metadata = input.readMap(); + String fullName = input.readOptionalString(); + String email = input.readOptionalString(); + boolean enabled = input.readBoolean(); + User outerUser = new User(username, roles, fullName, email, metadata, enabled, null); + boolean hasInnerUser = input.readBoolean(); + if (hasInnerUser) { + User innerUser = readFrom(input); + if (input.getVersion().onOrBefore(Version.V_5_4_0)) { + // backcompat: runas user was read first, so reverse outer and inner + return new User(innerUser, outerUser); + } else { + return new User(outerUser, innerUser); + } + } else { + return outerUser; + } + } + + public static User readFrom(StreamInput input) throws IOException { + final boolean isInternalUser = input.readBoolean(); + assert isInternalUser == false: "should always return false. Internal users should use the InternalUserSerializationHelper"; + final String username = input.readString(); + return partialReadFrom(username, input); + } + + public static void writeTo(User user, StreamOutput output) throws IOException { + if (user.authenticatedUser == null) { + // no backcompat necessary, since there is no inner user + writeUser(user, output); + } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { + // backcompat: write runas user as the "inner" user + writeUser(user.authenticatedUser, output); + output.writeBoolean(true); + writeUser(user, output); + } else { + writeUser(user, output); + output.writeBoolean(true); + writeUser(user.authenticatedUser, output); + } + output.writeBoolean(false); // last user written, regardless of bwc, does not have an inner user + } + + /** Write just the given {@link User}, but not the inner {@link #authenticatedUser}. 
*/ + private static void writeUser(User user, StreamOutput output) throws IOException { + output.writeBoolean(false); // not a system user + output.writeString(user.username); + output.writeStringArray(user.roles); + output.writeMap(user.metadata); + output.writeOptionalString(user.fullName); + output.writeOptionalString(user.email); + output.writeBoolean(user.enabled); + } + + public interface Fields { + ParseField USERNAME = new ParseField("username"); + ParseField PASSWORD = new ParseField("password"); + ParseField PASSWORD_HASH = new ParseField("password_hash"); + ParseField ROLES = new ParseField("roles"); + ParseField FULL_NAME = new ParseField("full_name"); + ParseField EMAIL = new ParseField("email"); + ParseField METADATA = new ParseField("metadata"); + ParseField ENABLED = new ParseField("enabled"); + ParseField TYPE = new ParseField("type"); + } +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java new file mode 100644 index 0000000000000..ce627b267f31e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Security + * APIs. + */ +package org.elasticsearch.protocol.xpack.security; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java new file mode 100644 index 0000000000000..3480d8485f069 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchRequest.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
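A sketch of the wire round trip the static writeTo/readFrom helpers implement, using an in-memory BytesStreamOutput; the usernames and roles are invented.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.protocol.xpack.security.User;

public class UserWireSketch {
    public static void main(String[] args) throws Exception {
        User authenticated = new User("alice", "superuser");
        User effective = new User("bob", new String[] { "monitoring_user" }, authenticated); // run-as user

        try (BytesStreamOutput out = new BytesStreamOutput()) {
            User.writeTo(effective, out);
            try (StreamInput in = out.bytes().streamInput()) {
                User roundTripped = User.readFrom(in);
                System.out.println(roundTripped.principal());                     // bob
                System.out.println(roundTripped.isRunAs());                       // true
                System.out.println(roundTripped.authenticatedUser().principal()); // alice
            }
        }
    }
}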
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.unit.TimeValue; + +import java.io.IOException; + +/** + * A delete watch request to delete an watch by name (id) + */ +public class DeleteWatchRequest extends MasterNodeRequest { + + private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(10); + + private String id; + private long version = Versions.MATCH_ANY; + + public DeleteWatchRequest() { + this(null); + } + + public DeleteWatchRequest(String id) { + this.id = id; + masterNodeTimeout(DEFAULT_TIMEOUT); + } + + /** + * @return The name of the watch to be deleted + */ + public String getId() { + return id; + } + + /** + * Sets the name of the watch to be deleted + */ + public void setId(String id) { + this.id = id; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (id == null){ + validationException = ValidateActions.addValidationError("watch id is missing", validationException); + } else if (PutWatchRequest.isValidId(id) == false) { + validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readLong(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeLong(version); + } + + @Override + public String toString() { + return "delete [" + id + "]"; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java new file mode 100644 index 0000000000000..39cd5e966fa12 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponse.java @@ -0,0 +1,110 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
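For illustration, the DeleteWatchRequest validation rules above in action (watch ids invented).

import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;

public class DeleteWatchRequestSketch {
    public static void main(String[] args) {
        DeleteWatchRequest ok = new DeleteWatchRequest("cluster_health_watch");
        System.out.println(ok.validate());      // null: the id is acceptable

        DeleteWatchRequest missing = new DeleteWatchRequest();
        System.out.println(missing.validate()); // validation error: watch id is missing

        DeleteWatchRequest spaces = new DeleteWatchRequest("my watch");
        System.out.println(spaces.validate());  // validation error: watch id contains whitespace
    }
}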
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class DeleteWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser PARSER + = new ObjectParser<>("x_pack_delete_watch_response", DeleteWatchResponse::new); + static { + PARSER.declareString(DeleteWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(DeleteWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(DeleteWatchResponse::setFound, new ParseField("found")); + } + + private String id; + private long version; + private boolean found; + + public DeleteWatchResponse() { + } + + public DeleteWatchResponse(String id, long version, boolean found) { + this.id = id; + this.version = version; + this.found = found; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isFound() { + return found; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setFound(boolean found) { + this.found = found; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + DeleteWatchResponse that = (DeleteWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(found, that.found); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, found); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + found = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(found); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("found", found) + .endObject(); + } + + public static DeleteWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java new file mode 100644 index 0000000000000..abc42b149194b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchRequest.java @@ -0,0 +1,162 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ValidateActions; +import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.regex.Pattern; + +/** + * This request class contains the data needed to create a watch along with the name of the watch. + * The name of the watch will become the ID of the indexed document. + */ +public class PutWatchRequest extends MasterNodeRequest { + + private static final TimeValue DEFAULT_TIMEOUT = TimeValue.timeValueSeconds(10); + private static final Pattern NO_WS_PATTERN = Pattern.compile("\\S+"); + + private String id; + private BytesReference source; + private XContentType xContentType = XContentType.JSON; + private boolean active = true; + private long version = Versions.MATCH_ANY; + + public PutWatchRequest() {} + + public PutWatchRequest(StreamInput in) throws IOException { + readFrom(in); + } + + public PutWatchRequest(String id, BytesReference source, XContentType xContentType) { + this.id = id; + this.source = source; + this.xContentType = xContentType; + masterNodeTimeout(DEFAULT_TIMEOUT); + } + + /** + * @return The name that will be the ID of the indexed document + */ + public String getId() { + return id; + } + + /** + * Set the watch name + */ + public void setId(String id) { + this.id = id; + } + + /** + * @return The source of the watch + */ + public BytesReference getSource() { + return source; + } + + /** + * Set the source of the watch + */ + public void setSource(BytesReference source, XContentType xContentType) { + this.source = source; + this.xContentType = xContentType; + } + + /** + * @return The initial active state of the watch (defaults to {@code true}, e.g. 
"active") + */ + public boolean isActive() { + return active; + } + + /** + * Sets the initial active state of the watch + */ + public void setActive(boolean active) { + this.active = active; + } + + /** + * Get the content type for the source + */ + public XContentType xContentType() { + return xContentType; + } + + public long getVersion() { + return version; + } + + public void setVersion(long version) { + this.version = version; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (id == null) { + validationException = ValidateActions.addValidationError("watch id is missing", validationException); + } else if (isValidId(id) == false) { + validationException = ValidateActions.addValidationError("watch id contains whitespace", validationException); + } + if (source == null) { + validationException = ValidateActions.addValidationError("watch source is missing", validationException); + } + if (xContentType == null) { + validationException = ValidateActions.addValidationError("request body is missing", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + source = in.readBytesReference(); + active = in.readBoolean(); + if (in.getVersion().onOrAfter(Version.V_5_3_0)) { + xContentType = in.readEnum(XContentType.class); + } else { + xContentType = XContentHelper.xContentType(source); + } + if (in.getVersion().onOrAfter(Version.V_6_3_0)) { + version = in.readZLong(); + } else { + version = Versions.MATCH_ANY; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeBytesReference(source); + out.writeBoolean(active); + if (out.getVersion().onOrAfter(Version.V_5_3_0)) { + out.writeEnum(xContentType); + } + if (out.getVersion().onOrAfter(Version.V_6_3_0)) { + out.writeZLong(version); + } + } + + public static boolean isValidId(String id) { + return Strings.isEmpty(id) == false && NO_WS_PATTERN.matcher(id).matches(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java new file mode 100644 index 0000000000000..f6e55ff555339 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponse.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +public class PutWatchResponse extends ActionResponse implements ToXContentObject { + + private static final ObjectParser PARSER + = new ObjectParser<>("x_pack_put_watch_response", PutWatchResponse::new); + static { + PARSER.declareString(PutWatchResponse::setId, new ParseField("_id")); + PARSER.declareLong(PutWatchResponse::setVersion, new ParseField("_version")); + PARSER.declareBoolean(PutWatchResponse::setCreated, new ParseField("created")); + } + + private String id; + private long version; + private boolean created; + + public PutWatchResponse() { + } + + public PutWatchResponse(String id, long version, boolean created) { + this.id = id; + this.version = version; + this.created = created; + } + + private void setId(String id) { + this.id = id; + } + + private void setVersion(long version) { + this.version = version; + } + + private void setCreated(boolean created) { + this.created = created; + } + + public String getId() { + return id; + } + + public long getVersion() { + return version; + } + + public boolean isCreated() { + return created; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + PutWatchResponse that = (PutWatchResponse) o; + + return Objects.equals(id, that.id) && Objects.equals(version, that.version) && Objects.equals(created, that.created); + } + + @Override + public int hashCode() { + return Objects.hash(id, version, created); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeVLong(version); + out.writeBoolean(created); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + id = in.readString(); + version = in.readVLong(); + created = in.readBoolean(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("_id", id) + .field("_version", version) + .field("created", created) + .endObject(); + } + + public static PutWatchResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java new file mode 100644 index 0000000000000..0d9edf3b5c035 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +/** + * Request and Response objects for the default distribution's Watcher + * APIs. 
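PutWatchResponse above wires an ObjectParser to the "_id", "_version" and "created" fields emitted by toXContent, so a response can be parsed back from JSON. A minimal round-trip sketch under that assumption; the example class name is made up.

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;

public class PutWatchResponseRoundTripExample {
    public static void main(String[] args) throws Exception {
        PutWatchResponse original = new PutWatchResponse("log_error_watch", 2L, false);

        // toXContent emits {"_id":"log_error_watch","_version":2,"created":false}
        String json = Strings.toString(original);

        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            // fromXContent drives the ObjectParser declared above, mapping the three fields back onto the setters.
            PutWatchResponse parsed = PutWatchResponse.fromXContent(parser);
            assert parsed.equals(original);
        }
    }
}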
+ */ +package org.elasticsearch.protocol.xpack.watcher; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java new file mode 100644 index 0000000000000..fac99959c536a --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; +import java.io.IOException; + +public class XPackInfoResponseTests extends AbstractStreamableXContentTestCase { + @Override + protected XPackInfoResponse doParseInstance(XContentParser parser) throws IOException { + return XPackInfoResponse.fromXContent(parser); + } + + @Override + protected XPackInfoResponse createBlankInstance() { + return new XPackInfoResponse(); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return path -> path.equals("features") + || (path.startsWith("features") && path.endsWith("native_code_info")); + } + + @Override + protected ToXContent.Params getToXContentParams() { + Map params = new HashMap<>(); + if (randomBoolean()) { + params.put("human", randomBoolean() ? "true" : "false"); + } + if (randomBoolean()) { + params.put("categories", "_none"); + } + return new ToXContent.MapParams(params); + } + + @Override + protected XPackInfoResponse createTestInstance() { + return new XPackInfoResponse( + randomBoolean() ? null : randomBuildInfo(), + randomBoolean() ? null : randomLicenseInfo(), + randomBoolean() ? 
null : randomFeatureSetsInfo()); + } + + @Override + protected XPackInfoResponse mutateInstance(XPackInfoResponse response) { + @SuppressWarnings("unchecked") + Function mutator = randomFrom( + r -> new XPackInfoResponse( + mutateBuildInfo(r.getBuildInfo()), + r.getLicenseInfo(), + r.getFeatureSetsInfo()), + r -> new XPackInfoResponse( + r.getBuildInfo(), + mutateLicenseInfo(r.getLicenseInfo()), + r.getFeatureSetsInfo()), + r -> new XPackInfoResponse( + r.getBuildInfo(), + r.getLicenseInfo(), + mutateFeatureSetsInfo(r.getFeatureSetsInfo()))); + return mutator.apply(response); + } + + private BuildInfo randomBuildInfo() { + return new BuildInfo( + randomAlphaOfLength(10), + randomAlphaOfLength(15)); + } + + private BuildInfo mutateBuildInfo(BuildInfo buildInfo) { + if (buildInfo == null) { + return randomBuildInfo(); + } + return null; + } + + private LicenseInfo randomLicenseInfo() { + return new LicenseInfo( + randomAlphaOfLength(10), + randomAlphaOfLength(4), + randomAlphaOfLength(5), + randomFrom(LicenseStatus.values()), + randomLong()); + } + + private LicenseInfo mutateLicenseInfo(LicenseInfo licenseInfo) { + if (licenseInfo == null) { + return randomLicenseInfo(); + } + return null; + } + + private FeatureSetsInfo randomFeatureSetsInfo() { + int size = between(0, 10); + Set featureSets = new HashSet<>(size); + while (featureSets.size() < size) { + featureSets.add(randomFeatureSet()); + } + return new FeatureSetsInfo(featureSets); + } + + private FeatureSetsInfo mutateFeatureSetsInfo(FeatureSetsInfo featureSetsInfo) { + if (featureSetsInfo == null) { + return randomFeatureSetsInfo(); + } + return null; + } + + private FeatureSet randomFeatureSet() { + return new FeatureSet( + randomAlphaOfLength(5), + randomBoolean() ? null : randomAlphaOfLength(20), + randomBoolean(), + randomBoolean(), + randomNativeCodeInfo()); + } + + private Map randomNativeCodeInfo() { + if (randomBoolean()) { + return null; + } + int size = between(0, 10); + Map nativeCodeInfo = new HashMap<>(size); + while (nativeCodeInfo.size() < size) { + nativeCodeInfo.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); + } + return nativeCodeInfo; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java new file mode 100644 index 0000000000000..c4e29d7c23005 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/common/ProtocolUtilsTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
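XPackInfoResponseTests extends AbstractStreamableXContentTestCase, so the base class exercises both XContent parsing and transport (stream) serialization of randomly created and mutated instances. For readers unfamiliar with the streamable half of that contract, here is a stripped-down sketch of the round trip it performs, shown with the PutWatchResponse class from this change; the example class name is invented.

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;

public class StreamRoundTripExample {
    public static void main(String[] args) throws Exception {
        PutWatchResponse original = new PutWatchResponse("log_error_watch", 5L, true);

        // Serialize to the transport wire format ...
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out);

            // ... and read it back into a blank instance, which is what the streamable test base class verifies.
            try (StreamInput in = out.bytes().streamInput()) {
                PutWatchResponse copy = new PutWatchResponse();
                copy.readFrom(in);
                assert copy.equals(original);
            }
        }
    }
}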
+ */
+package org.elasticsearch.protocol.xpack.common;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ProtocolUtilsTests extends ESTestCase {
+
+    public void testMapStringEqualsAndHash() {
+        assertTrue(ProtocolUtils.equals(null, null));
+        assertFalse(ProtocolUtils.equals(null, new HashMap<>()));
+        assertFalse(ProtocolUtils.equals(new HashMap<>(), null));
+
+        Map<String, String[]> a = new HashMap<>();
+        a.put("foo", new String[] { "a", "b" });
+        a.put("bar", new String[] { "b", "c" });
+
+        Map<String, String[]> b = new HashMap<>();
+        b.put("foo", new String[] { "a", "b" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("bar", new String[] { "c", "b" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("bar", new String[] { "b", "c" });
+
+        assertTrue(ProtocolUtils.equals(a, b));
+        assertTrue(ProtocolUtils.equals(b, a));
+        assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b));
+
+        b.put("baz", new String[] { "b", "c" });
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        a.put("non", null);
+
+        assertFalse(ProtocolUtils.equals(a, b));
+        assertFalse(ProtocolUtils.equals(b, a));
+
+        b.put("non", null);
+        b.remove("baz");
+
+        assertTrue(ProtocolUtils.equals(a, b));
+        assertTrue(ProtocolUtils.equals(b, a));
+        assertEquals(ProtocolUtils.hashCode(a), ProtocolUtils.hashCode(b));
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
new file mode 100644
index 0000000000000..3226198191cef
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/graph/GraphExploreResponseTests.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
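ProtocolUtils itself is not part of this hunk, but the test above pins down its contract: map equality keyed on String with Arrays.equals applied to the String[] values (order inside each array matters, null values are tolerated), plus a matching hashCode. A rough sketch of helpers with that behaviour follows; it illustrates the contract only and is not the actual ProtocolUtils implementation.

import java.util.Arrays;
import java.util.Map;

// Illustration only: the equality/hashCode semantics implied by ProtocolUtilsTests.
public final class MapStringArrayEquality {

    public static boolean equals(Map<String, String[]> a, Map<String, String[]> b) {
        if (a == null) {
            return b == null;
        }
        if (b == null || a.size() != b.size()) {
            return false;
        }
        for (Map.Entry<String, String[]> entry : a.entrySet()) {
            // Array order matters: {"b","c"} and {"c","b"} are not equal, matching the assertions above.
            if (b.containsKey(entry.getKey()) == false
                    || Arrays.equals(entry.getValue(), b.get(entry.getKey())) == false) {
                return false;
            }
        }
        return true;
    }

    public static int hashCode(Map<String, String[]> a) {
        int hash = 0;
        for (Map.Entry<String, String[]> entry : a.entrySet()) {
            // Arrays.hashCode handles null values, mirroring the null-tolerant equals above.
            hash += 31 * entry.getKey().hashCode() + Arrays.hashCode(entry.getValue());
        }
        return hash;
    }

    private MapStringArrayEquality() {}
}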
+ */ +package org.elasticsearch.protocol.xpack.graph; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ShardOperationFailedException; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; + +public class GraphExploreResponseTests extends AbstractXContentTestCase< GraphExploreResponse> { + + @Override + protected GraphExploreResponse createTestInstance() { + return createInstance(0); + } + private static GraphExploreResponse createInstance(int numFailures) { + int numItems = randomIntBetween(4, 128); + boolean timedOut = randomBoolean(); + boolean showDetails = randomBoolean(); + long overallTookInMillis = randomNonNegativeLong(); + Map vertices = new HashMap<>(); + Map connections = new HashMap<>(); + ShardOperationFailedException [] failures = new ShardOperationFailedException [numFailures]; + for (int i = 0; i < failures.length; i++) { + failures[i] = new ShardSearchFailure(new ElasticsearchException("an error")); + } + + //Create random set of vertices + for (int i = 0; i < numItems; i++) { + Vertex v = new Vertex("field1", randomAlphaOfLength(5), randomDouble(), 0, + showDetails?randomIntBetween(100, 200):0, + showDetails?randomIntBetween(1, 100):0); + vertices.put(v.getId(), v); + } + + //Wire up half the vertices randomly + Vertex[] vs = vertices.values().toArray(new Vertex[vertices.size()]); + for (int i = 0; i < numItems/2; i++) { + Vertex v1 = vs[randomIntBetween(0, vs.length-1)]; + Vertex v2 = vs[randomIntBetween(0, vs.length-1)]; + if(v1 != v2) { + Connection conn = new Connection(v1, v2, randomDouble(), randomLongBetween(1, 10)); + connections.put(conn.getId(), conn); + } + } + return new GraphExploreResponse(overallTookInMillis, timedOut, failures, vertices, connections, showDetails); + } + + + @Override + protected String[] getShuffleFieldsExceptions() { + return new String[]{"vertices"}; + } + + private static GraphExploreResponse createTestInstanceWithFailures() { + return createInstance(randomIntBetween(1, 128)); + } + + @Override + protected GraphExploreResponse doParseInstance(XContentParser parser) throws IOException { + return GraphExploreResponse.fromXContext(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + protected Predicate getRandomFieldsExcludeFilterWhenResultHasErrors() { + return field -> field.startsWith("responses"); + } + + @Override + protected void assertEqualInstances( GraphExploreResponse expectedInstance, GraphExploreResponse newInstance) { + assertThat(newInstance.getTook(), equalTo(expectedInstance.getTook())); + assertThat(newInstance.isTimedOut(), equalTo(expectedInstance.isTimedOut())); + + Connection[] newConns = newInstance.getConnections().toArray(new Connection[0]); + Connection[] expectedConns = expectedInstance.getConnections().toArray(new Connection[0]); + assertArrayEquals(expectedConns, newConns); + + Vertex[] newVertices = newInstance.getVertices().toArray(new Vertex[0]); + Vertex[] expectedVertices = expectedInstance.getVertices().toArray(new Vertex[0]); + assertArrayEquals(expectedVertices, newVertices); + + ShardOperationFailedException[] 
newFailures = newInstance.getShardFailures(); + ShardOperationFailedException[] expectedFailures = expectedInstance.getShardFailures(); + assertEquals(expectedFailures.length, newFailures.length); + + } + + /** + * Test parsing {@link GraphExploreResponse} with inner failures as they don't support asserting on xcontent equivalence, given + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier< GraphExploreResponse> instanceSupplier = GraphExploreResponseTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. + boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, + getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java new file mode 100644 index 0000000000000..7149477d00765 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.io.IOException; + +import org.elasticsearch.test.ESTestCase; + +public class LicenseStatusTests extends ESTestCase { + public void testSerialization() throws IOException { + LicenseStatus status = randomFrom(LicenseStatus.values()); + assertSame(status, copyWriteable(status, writableRegistry(), LicenseStatus::readFrom)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java new file mode 100644 index 0000000000000..a09fd6fb99b45 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/license/PutLicenseResponseTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.protocol.xpack.license; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; + +public class PutLicenseResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // The structure of the response is such that unknown fields inside acknowledge cannot be supported since they + // are treated as messages from new services + return p -> p.startsWith("acknowledge"); + } + + @Override + protected PutLicenseResponse createTestInstance() { + boolean acknowledged = randomBoolean(); + LicensesStatus status = randomFrom(LicensesStatus.VALID, LicensesStatus.INVALID, LicensesStatus.EXPIRED); + String messageHeader; + Map ackMessages; + if (randomBoolean()) { + messageHeader = randomAlphaOfLength(10); + ackMessages = randomAckMessages(); + } else { + messageHeader = null; + ackMessages = Collections.emptyMap(); + } + + return new PutLicenseResponse(acknowledged, status, messageHeader, ackMessages); + } + + private static Map randomAckMessages() { + int nFeatures = randomIntBetween(1, 5); + + Map ackMessages = new HashMap<>(); + + for (int i = 0; i < nFeatures; i++) { + String feature = randomAlphaOfLengthBetween(9, 15); + int nMessages = randomIntBetween(1, 5); + String[] messages = new String[nMessages]; + for (int j = 0; j < nMessages; j++) { + messages[j] = randomAlphaOfLengthBetween(10, 30); + } + ackMessages.put(feature, messages); + } + + return ackMessages; + } + + @Override + protected PutLicenseResponse doParseInstance(XContentParser parser) throws IOException { + return PutLicenseResponse.fromXContent(parser); + } + + @Override + protected PutLicenseResponse createBlankInstance() { + return new PutLicenseResponse(); + } + + @Override + protected PutLicenseResponse mutateInstance(PutLicenseResponse response) { + @SuppressWarnings("unchecked") + Function mutator = randomFrom( + r -> new PutLicenseResponse( + r.isAcknowledged() == false, + r.status(), + r.acknowledgeHeader(), + r.acknowledgeMessages()), + r -> new PutLicenseResponse( + r.isAcknowledged(), + mutateStatus(r.status()), + r.acknowledgeHeader(), + r.acknowledgeMessages()), + r -> { + if (r.acknowledgeMessages().isEmpty()) { + return new PutLicenseResponse( + r.isAcknowledged(), + r.status(), + randomAlphaOfLength(10), + randomAckMessages() + ); + } else { + return new PutLicenseResponse(r.isAcknowledged(), r.status()); + } + } + + ); + return mutator.apply(response); + } + + private LicensesStatus mutateStatus(LicensesStatus status) { + return randomValueOtherThan(status, () -> randomFrom(LicensesStatus.values())); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java new file mode 100644 index 0000000000000..ba87cca84ee85 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoRequestTests.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
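The mutateInstance override above follows the same idiom used throughout these tests: randomFrom picks one mutator, and each mutator changes exactly one aspect of the response, so a failed equality check can be attributed to a single field. A stripped-down sketch of that idiom outside the test framework; the Sample type and class name are invented.

import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.function.Function;

public class MutateOneFieldExample {

    // A stand-in value type; the real tests mutate PutLicenseResponse, XPackInfoResponse, and friends.
    static final class Sample {
        final String name;
        final long version;

        Sample(String name, long version) {
            this.name = name;
            this.version = version;
        }
    }

    static Sample mutate(Sample original, Random random) {
        // Each mutator changes exactly one field, so an equality failure points at a single culprit.
        List<Function<Sample, Sample>> mutators = Arrays.asList(
                s -> new Sample(s.name + "_changed", s.version),
                s -> new Sample(s.name, s.version + 1));
        return mutators.get(random.nextInt(mutators.size())).apply(original);
    }

    public static void main(String[] args) {
        Sample original = new Sample("trial", 1);
        Sample mutated = mutate(original, new Random());
        assert (mutated.name.equals(original.name) && mutated.version == original.version) == false;
    }
}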
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.migration; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.test.AbstractStreamableTestCase; + +public class IndexUpgradeInfoRequestTests extends AbstractStreamableTestCase { + @Override + protected IndexUpgradeInfoRequest createTestInstance() { + int indexCount = randomInt(4); + String[] indices = new String[indexCount]; + for (int i = 0; i < indexCount; i++) { + indices[i] = randomAlphaOfLength(10); + } + IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest(indices); + if (randomBoolean()) { + request.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); + } + return request; + } + + + public void testNullIndices() { + expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest((String[])null)); + expectThrows(NullPointerException.class, () -> new IndexUpgradeInfoRequest().indices((String[])null)); + } + + @Override + protected IndexUpgradeInfoRequest createBlankInstance() { + return new IndexUpgradeInfoRequest(); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java new file mode 100644 index 0000000000000..57f01a4454e02 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/migration/IndexUpgradeInfoResponseTests.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
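IndexUpgradeInfoRequest, as built in the test above, takes the index names to inspect plus optional IndicesOptions, and rejects null indices outright, as testNullIndices asserts. A short usage sketch; the index names and the example class name are placeholders.

import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;

public class IndexUpgradeInfoRequestExample {
    public static void main(String[] args) {
        // Ask for upgrade information about two (placeholder) indices.
        IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest("logs-2018.07", "metrics-2018.07");

        // fromOptions(ignoreUnavailable, allowNoIndices, expandToOpenIndices, expandToClosedIndices)
        request.indicesOptions(IndicesOptions.fromOptions(true, true, true, false));

        // Passing null indices is rejected with a NullPointerException, as testNullIndices asserts:
        // new IndexUpgradeInfoRequest((String[]) null);
    }
}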
+ */
+package org.elasticsearch.protocol.xpack.migration;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractStreamableXContentTestCase;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+public class IndexUpgradeInfoResponseTests extends AbstractStreamableXContentTestCase<IndexUpgradeInfoResponse> {
+    @Override
+    protected IndexUpgradeInfoResponse doParseInstance(XContentParser parser) {
+        return IndexUpgradeInfoResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createBlankInstance() {
+        return new IndexUpgradeInfoResponse();
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse createTestInstance() {
+        return randomIndexUpgradeInfoResponse(randomIntBetween(0, 10));
+    }
+
+    private static IndexUpgradeInfoResponse randomIndexUpgradeInfoResponse(int numIndices) {
+        Map<String, UpgradeActionRequired> actions = new HashMap<>();
+        for (int i = 0; i < numIndices; i++) {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+
+    @Override
+    protected IndexUpgradeInfoResponse mutateInstance(IndexUpgradeInfoResponse instance) {
+        if (instance.getActions().size() == 0) {
+            return randomIndexUpgradeInfoResponse(1);
+        }
+        Map<String, UpgradeActionRequired> actions = new HashMap<>(instance.getActions());
+        if (randomBoolean()) {
+            Iterator<Map.Entry<String, UpgradeActionRequired>> iterator = actions.entrySet().iterator();
+            iterator.next();
+            iterator.remove();
+        } else {
+            actions.put(randomAlphaOfLength(5), randomFrom(UpgradeActionRequired.values()));
+        }
+        return new IndexUpgradeInfoResponse(actions);
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java
new file mode 100644
index 0000000000000..28a27e639985d
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/security/UserTests.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
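IndexUpgradeInfoResponse, as exercised above, is essentially a map from index name to the UpgradeActionRequired for that index. A minimal construction-and-inspection sketch; the index name and the example class name are placeholders.

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;
import org.elasticsearch.protocol.xpack.migration.UpgradeActionRequired;

public class IndexUpgradeInfoResponseExample {
    public static void main(String[] args) {
        // Map each index name to the action it requires; any enum constant will do for the sketch.
        Map<String, UpgradeActionRequired> actions = new HashMap<>();
        actions.put("logs-2017.01", UpgradeActionRequired.values()[0]);

        IndexUpgradeInfoResponse response = new IndexUpgradeInfoResponse(actions);
        for (Map.Entry<String, UpgradeActionRequired> entry : response.getActions().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}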
+ */ +package org.elasticsearch.protocol.xpack.security; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; + +import static org.hamcrest.Matchers.is; + +public class UserTests extends ESTestCase { + + public void testUserToString() { + User user = new User("u1", "r1"); + assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={}]")); + user = new User("u1", new String[] { "r1", "r2" }, "user1", "user1@domain.com", Collections.singletonMap("key", "val"), true); + assertThat(user.toString(), is("User[username=u1,roles=[r1,r2],fullName=user1,email=user1@domain.com,metadata={key=val}]")); + user = new User("u1", new String[] {"r1"}, new User("u2", "r2", "r3")); + assertThat(user.toString(), is("User[username=u1,roles=[r1],fullName=null,email=null,metadata={}," + + "authenticatedUser=[User[username=u2,roles=[r2,r3],fullName=null,email=null,metadata={}]]]")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java new file mode 100644 index 0000000000000..209bc790a8c54 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/DeleteWatchResponseTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.protocol.xpack.watcher; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class DeleteWatchResponseTests extends AbstractXContentTestCase { + + @Override + protected DeleteWatchResponse createTestInstance() { + String id = randomAlphaOfLength(10); + long version = randomLongBetween(1, 10); + boolean found = randomBoolean(); + return new DeleteWatchResponse(id, version, found); + } + + @Override + protected DeleteWatchResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteWatchResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java new file mode 100644 index 0000000000000..1fc2f61b684c7 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/protocol/xpack/watcher/PutWatchResponseTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
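UserTests above relies on three User constructors: username plus roles, the fully populated form with full name, email, metadata and enabled flag, and the run-as form that nests an authenticated user. A short construction sketch using only those constructors; all names and values are placeholders.

import java.util.Collections;

import org.elasticsearch.protocol.xpack.security.User;

public class UserExample {
    public static void main(String[] args) {
        // Username plus roles.
        User simple = new User("alice", "monitoring_user");

        // Fully populated: roles, full name, email, metadata, enabled flag.
        User full = new User("alice", new String[] { "monitoring_user", "watcher_admin" },
                "Alice Example", "alice@example.com", Collections.singletonMap("team", "ops"), true);

        // Run-as form: the last argument is the authenticated user acting on alice's behalf.
        User runAs = new User("alice", new String[] { "monitoring_user" }, new User("kibana", "kibana_system"));

        // toString() renders username, roles, fullName, email, metadata and, if present, the authenticated user.
        System.out.println(simple);
        System.out.println(full);
        System.out.println(runAs);
    }
}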
+ */
+package org.elasticsearch.protocol.xpack.watcher;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+
+public class PutWatchResponseTests extends AbstractXContentTestCase<PutWatchResponse> {
+
+    @Override
+    protected PutWatchResponse createTestInstance() {
+        String id = randomAlphaOfLength(10);
+        long version = randomLongBetween(1, 10);
+        boolean created = randomBoolean();
+        return new PutWatchResponse(id, version, created);
+    }
+
+    @Override
+    protected PutWatchResponse doParseInstance(XContentParser parser) throws IOException {
+        return PutWatchResponse.fromXContent(parser);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}