Skip to content

EQL: case sensitivity aware integration testing #58624

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 17 commits into from
Jun 29, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,8 @@ public class EqlSearchRequest implements Validatable, ToXContentObject {
private String timestampField = "@timestamp";
private String eventCategoryField = "event.category";
private String implicitJoinKeyField = "agent.id";
private boolean isCaseSensitive = true;

private int fetchSize = 50;
private SearchAfterBuilder searchAfterBuilder;
private String query;
Expand All @@ -56,6 +58,7 @@ public class EqlSearchRequest implements Validatable, ToXContentObject {
static final String KEY_TIEBREAKER_FIELD = "tiebreaker_field";
static final String KEY_EVENT_CATEGORY_FIELD = "event_category_field";
static final String KEY_IMPLICIT_JOIN_KEY_FIELD = "implicit_join_key_field";
static final String KEY_CASE_SENSITIVE = "case_sensitive";
static final String KEY_SIZE = "size";
static final String KEY_SEARCH_AFTER = "search_after";
static final String KEY_QUERY = "query";
Expand Down Expand Up @@ -88,6 +91,8 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par
builder.array(KEY_SEARCH_AFTER, searchAfterBuilder.getSortValues());
}

builder.field(KEY_CASE_SENSITIVE, isCaseSensitive());

builder.field(KEY_QUERY, query);
if (waitForCompletionTimeout != null) {
builder.field(KEY_WAIT_FOR_COMPLETION_TIMEOUT, waitForCompletionTimeout);
Expand Down Expand Up @@ -152,6 +157,15 @@ public String implicitJoinKeyField() {
return this.implicitJoinKeyField;
}

/**
 * Returns whether string comparisons in the EQL query are case sensitive.
 */
public boolean isCaseSensitive() {
    return isCaseSensitive;
}

/**
 * Sets whether string comparisons in the EQL query should be case sensitive.
 *
 * @param isCaseSensitive {@code true} for case-sensitive matching
 * @return this request, to allow method chaining
 */
public EqlSearchRequest isCaseSensitive(boolean isCaseSensitive) {
    this.isCaseSensitive = isCaseSensitive;
    return this;
}

public EqlSearchRequest implicitJoinKeyField(String implicitJoinKeyField) {
Objects.requireNonNull(implicitJoinKeyField, "implicit join key must not be null");
this.implicitJoinKeyField = implicitJoinKeyField;
Expand Down Expand Up @@ -242,6 +256,7 @@ public boolean equals(Object o) {
Objects.equals(implicitJoinKeyField, that.implicitJoinKeyField) &&
Objects.equals(searchAfterBuilder, that.searchAfterBuilder) &&
Objects.equals(query, that.query) &&
Objects.equals(isCaseSensitive, that.isCaseSensitive) &&
Objects.equals(waitForCompletionTimeout, that.waitForCompletionTimeout) &&
Objects.equals(keepAlive, that.keepAlive) &&
Objects.equals(keepOnCompletion, that.keepOnCompletion);
Expand All @@ -255,11 +270,12 @@ public int hashCode() {
filter,
fetchSize,
timestampField,
tiebreakerField,
tiebreakerField,
eventCategoryField,
implicitJoinKeyField,
searchAfterBuilder,
query,
isCaseSensitive,
waitForCompletionTimeout,
keepAlive,
keepOnCompletion);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,118 +9,64 @@
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.Build;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.EqlClient;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.client.eql.EqlSearchResponse;
import org.elasticsearch.client.eql.EqlSearchResponse.Hits;
import org.elasticsearch.client.eql.EqlSearchResponse.Sequence;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.instanceOf;
import static org.elasticsearch.test.eql.DataLoader.testIndexName;

public abstract class CommonEqlActionTestCase extends ESRestTestCase {

private RestHighLevelClient highLevelClient;

static final String indexPrefix = "endgame";
static final String testIndexName = indexPrefix + "-1.4.0";
protected static final String PARAM_FORMATTING = "%1$s.test -> %2$s";
private RestHighLevelClient highLevelClient;

/**
 * Skips the entire test class when this is not a snapshot build.
 */
@BeforeClass
public static void checkForSnapshot() {
    boolean snapshotBuild = Build.CURRENT.isSnapshot();
    assumeTrue("Only works on snapshot builds for now", snapshotBuild);
}

private static boolean isSetUp = false;
private static int counter = 0;

/**
 * One-time data setup shared by all parameterized test instances: creates the
 * test index with the default mapping and bulk-loads the JSON test documents.
 * Guarded by the static isSetUp flag so only the first instance does the work.
 */
@SuppressWarnings("unchecked")
private static void setupData(CommonEqlActionTestCase tc) throws Exception {
    if (isSetUp) {
        return;
    }

    CreateIndexRequest request = new CreateIndexRequest(testIndexName)
        .mapping(Streams.readFully(CommonEqlActionTestCase.class.getResourceAsStream("/mapping-default.json")),
            XContentType.JSON);

    tc.highLevelClient().indices().create(request, RequestOptions.DEFAULT);

    BulkRequest bulk = new BulkRequest();
    // IMMEDIATE refresh so documents are searchable as soon as the bulk call returns.
    bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

    try (XContentParser parser = tc.createParser(JsonXContent.jsonXContent,
        CommonEqlActionTestCase.class.getResourceAsStream("/test_data.json"))) {
        List<Object> list = parser.list();
        for (Object item : list) {
            // Every entry in test_data.json is expected to parse as a JSON object.
            assertThat(item, instanceOf(HashMap.class));
            Map<String, Object> entry = (Map<String, Object>) item;
            bulk.add(new IndexRequest(testIndexName).source(entry, XContentType.JSON));
        }
    }

    if (bulk.numberOfActions() > 0) {
        BulkResponse bulkResponse = tc.highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        assertFalse(bulkResponse.hasFailures());
        // Mark setup complete only after a fully successful bulk load.
        isSetUp = true;
    }
}

/**
 * Deletes the shared test index once the last parameterized test instance has
 * run, so the data survives for the whole suite.
 */
private static void cleanupData(CommonEqlActionTestCase tc) throws Exception {
    // Delete index after all tests ran
    if (isSetUp) {
        counter--;
        if (counter == 0) {
            deleteIndex(testIndexName);
            isSetUp = false;
        }
    }
}

/**
 * Keep the cluster state alive so data survives across parameterized test runs.
 */
@Override
protected boolean preserveClusterUponCompletion() {
    return true;
}

// NOTE(review): this span renders a diff — the bare setupData(this) call looks like
// the removed pre-change line, while the HEAD-request check plus DataLoader call is
// the added replacement; confirm against the original patch.
@Before
public void setup() throws Exception {
    setupData(this);
    // Load the dataset only when the test index does not yet exist (HEAD returns 404).
    if (client().performRequest(new Request("HEAD", "/" + testIndexName)).getStatusLine().getStatusCode() == 404) {
        DataLoader.loadDatasetIntoEs(highLevelClient(), (t, u) -> createParser(t, u));
    }
}

// NOTE(review): rendered diff span — the @After cleanup()/cleanupData(this) lines are
// the removed per-test teardown; the @AfterClass variant deleting all indices is the
// added replacement. As rendered here the two signatures overlap and do not compile.
@After
public void cleanup() throws Exception {
    cleanupData(this);
@AfterClass
public static void cleanup() throws Exception {
    try {
        // Delete every index once the whole class has finished.
        adminClient().performRequest(new Request("DELETE", "/*"));
    } catch (ResponseException e) {
        // 404 here just means we had no indexes
        if (e.getResponse().getStatusLine().getStatusCode() != 404) {
            throw e;
        }
    }
}

@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readTestSpecs() throws Exception {

// Load EQL validation specs
List<EqlSpec> specs = EqlSpecLoader.load("/test_queries.toml", true);
specs.addAll(EqlSpecLoader.load("/test_queries_supported.toml", true));
specs.addAll(EqlSpecLoader.load("/additional_test_queries.toml", true));
List<EqlSpec> unsupportedSpecs = EqlSpecLoader.load("/test_queries_unsupported.toml", false);

// Validate only currently supported specs
Expand All @@ -130,7 +76,7 @@ public static List<Object[]> readTestSpecs() throws Exception {
boolean supported = true;
// Check if spec is supported, simple iteration, cause the list is short.
for (EqlSpec unSpec : unsupportedSpecs) {
if (spec.query() != null && spec.query().equals(unSpec.query())) {
if (spec.equals(unSpec)) {
supported = false;
break;
}
Expand All @@ -140,7 +86,6 @@ public static List<Object[]> readTestSpecs() throws Exception {
filteredSpecs.add(spec);
}
}
counter = specs.size();
return asArray(filteredSpecs);
}

Expand Down Expand Up @@ -170,7 +115,19 @@ public CommonEqlActionTestCase(int num, String name, EqlSpec spec) {
}

/**
 * Runs the spec's query in case-sensitive and/or case-insensitive mode,
 * depending on the spec's caseSensitive flag (null means run both).
 */
public void test() throws Exception {
    // NOTE(review): rendered diff — the two-arg runQuery call below is the removed
    // pre-change line; the if/else chain is the added replacement.
    assertResponse(runQuery(testIndexName, spec.query()));
    // run both tests if case sensitivity doesn't matter
    if (spec.caseSensitive() == null) {
        assertResponse(runQuery(testIndexName, spec.query(), true));
        assertResponse(runQuery(testIndexName, spec.query(), false));
    }
    // run only the case sensitive test
    else if (spec.caseSensitive()) {
        assertResponse(runQuery(testIndexName, spec.query(), true));
    }
    // run only the case insensitive test
    else {
        assertResponse(runQuery(testIndexName, spec.query(), false));
    }
}

protected void assertResponse(EqlSearchResponse response) {
Expand All @@ -186,14 +143,28 @@ else if (hits.sequences() != null) {
}
}

// NOTE(review): rendered diff span — the two-arg signature and the eqlClient() return
// are the removed pre-change lines; the three-arg signature, the isCaseSensitive(...)
// call and the highLevelClient().eql() return are the added replacements.
protected EqlSearchResponse runQuery(String index, String query) throws Exception {
protected EqlSearchResponse runQuery(String index, String query, boolean isCaseSensitive) throws Exception {
    // NOTE(review): builds the request from testIndexName, not the index parameter —
    // the parameter appears unused; confirm this is intended.
    EqlSearchRequest request = new EqlSearchRequest(testIndexName, query);
    request.isCaseSensitive(isCaseSensitive);
    request.tiebreakerField("event.sequence");
    return eqlClient().search(request, RequestOptions.DEFAULT);
    return highLevelClient().eql().search(request, RequestOptions.DEFAULT);
}

// NOTE(review): rendered diff span — eqlClient() is the removed helper; the lazily
// initialized highLevelClient() accessor is its added replacement. As rendered the
// two methods overlap and do not compile.
protected EqlClient eqlClient() {
    return highLevelClient().eql();
private RestHighLevelClient highLevelClient() {
    // Lazily build a high level client on top of the low level REST client; the
    // anonymous subclass presumably works around a protected constructor — confirm.
    if (highLevelClient == null) {
        highLevelClient = new RestHighLevelClient(
            client(),
            ignore -> {
            },
            Collections.emptyList()) {
        };
    }
    return highLevelClient;
}

/**
 * Asserts that the returned events exactly match the spec's expected event ids.
 */
protected void assertSearchHits(List<SearchHit> events) {
    assertNotNull(events);
    String message = "unexpected result for spec: [" + spec.toString() + "]";
    assertArrayEquals(message, spec.expectedEventIds(), extractIds(events));
}

private static long[] extractIds(List<SearchHit> events) {
Expand All @@ -205,27 +176,16 @@ private static long[] extractIds(List<SearchHit> events) {
return ids;
}

/**
 * Verifies the events are non-null and carry exactly the ids the spec expects.
 */
protected void assertSearchHits(List<SearchHit> events) {
    assertNotNull(events);
    String failureMessage = "unexpected result for spec: [" + spec.toString() + "]";
    assertArrayEquals(failureMessage, spec.expectedEventIds(), extractIds(events));
}

/**
 * Flattens the events of every sequence into one list and asserts them
 * against the spec's expected event ids.
 */
protected void assertSequences(List<Sequence> sequences) {
    List<SearchHit> events = new ArrayList<>();
    for (Sequence sequence : sequences) {
        events.addAll(sequence.events());
    }
    assertSearchHits(events);
}

private RestHighLevelClient highLevelClient() {
if (highLevelClient == null) {
highLevelClient = new RestHighLevelClient(
client(),
ignore -> {
},
List.of()) {
};
}
return highLevelClient;
/**
 * Preserve the cluster between runs; the parameterized tests share one dataset.
 */
@Override
protected boolean preserveClusterUponCompletion() {
    return true;
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.test.eql;

import org.apache.http.HttpHost;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;

import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;

/**
 * Loads the EQL test dataset (default mapping plus JSON documents) into an
 * Elasticsearch cluster, either standalone via {@link #main} against
 * localhost:9200 or from test setup code via {@link #loadDatasetIntoEs}.
 */
public class DataLoader {

    // Classpath locations of the bulk documents and the index mapping.
    private static final String TEST_DATA = "/test_data.json";
    private static final String MAPPING = "/mapping-default.json";
    // Index naming shared with the EQL integration tests (package-private on purpose).
    static final String indexPrefix = "endgame";
    static final String testIndexName = indexPrefix + "-1.4.0";

    /**
     * Standalone entry point: loads the dataset into a cluster on localhost:9200.
     */
    public static void main(String[] args) throws IOException {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            // Anonymous RestHighLevelClient subclass built on the low level client;
            // presumably works around a protected constructor — confirm.
            loadDatasetIntoEs(new RestHighLevelClient(
                client,
                ignore -> {
                },
                List.of()) {
            }, (t, u) -> createParser(t, u));
        }
    }

    /**
     * Creates the test index with the default mapping and bulk-indexes every
     * document from the test data file.
     *
     * @param client high level client used for index creation and bulk indexing
     * @param p      factory producing an XContentParser for the test data stream
     * @throws IOException on any client or parsing failure
     */
    @SuppressWarnings("unchecked")
    protected static void loadDatasetIntoEs(RestHighLevelClient client,
        CheckedBiFunction<XContent, InputStream, XContentParser, IOException> p) throws IOException {

        CreateIndexRequest request = new CreateIndexRequest(testIndexName)
            .mapping(Streams.readFully(DataLoader.class.getResourceAsStream(MAPPING)), XContentType.JSON);

        client.indices().create(request, RequestOptions.DEFAULT);

        BulkRequest bulk = new BulkRequest();
        // IMMEDIATE refresh so documents are searchable as soon as the bulk returns.
        bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

        try (XContentParser parser = p.apply(JsonXContent.jsonXContent, DataLoader.class.getResourceAsStream(TEST_DATA))) {
            List<Object> list = parser.list();
            for (Object item : list) {
                bulk.add(new IndexRequest(testIndexName).source((Map<String, Object>) item, XContentType.JSON));
            }
        }

        if (bulk.numberOfActions() > 0) {
            BulkResponse bulkResponse = client.bulk(bulk, RequestOptions.DEFAULT);
            // NOTE(review): bulk failures are only logged, never thrown — callers cannot
            // tell the load failed; confirm this best-effort behavior is intended.
            if (bulkResponse.hasFailures()) {
                LogManager.getLogger(DataLoader.class).info("Data FAILED loading");
            } else {
                LogManager.getLogger(DataLoader.class).info("Data loaded");
            }
        }
    }

    // Parser backed by the cluster module's named X-content registry; deprecations are logged.
    private static XContentParser createParser(XContent xContent, InputStream data) throws IOException {
        NamedXContentRegistry contentRegistry = new NamedXContentRegistry(ClusterModule.getNamedXWriteables());
        return xContent.createParser(contentRegistry, LoggingDeprecationHandler.INSTANCE, data);
    }
}
Loading