Skip to content

Commit

Permalink
AD model performance benchmark
Browse files Browse the repository at this point in the history
This PR adds an AD model performance benchmark so that we can compare model performance across versions.

For the single stream detector, we refactored the tests in DetectionResultEvalutationIT and moved them to SingleStreamModelPerfIT.

For the HCAD detector, we randomly generated synthetic data with known anomalies inserted throughout the signal. In particular, these are one/two/four dimensional data where each dimension is a noisy cosine wave. Anomalies are inserted into one dimension with 0.003 probability. Anomalies across each dimension can be independent or dependent. We have approximately 5000 observations per data set. The data set is generated using the same random seed so the result is comparable across versions.

We also backported opensearch-project#600 so that we can capture the performance data in CI output.

We also fixed opensearch-project#712 by revising the client setup code.

Testing done:
* added unit tests to run the benchmark.

Signed-off-by: Kaituo Li <kaituo@amazon.com>
  • Loading branch information
kaituo committed Nov 14, 2022
1 parent 7cefb14 commit 1e01657
Show file tree
Hide file tree
Showing 12 changed files with 1,180 additions and 630 deletions.
33 changes: 33 additions & 0 deletions .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Workflow that runs the AD model performance benchmarks (HCAD unit-test
# benchmark plus the single-stream integration benchmark) on every push and PR.
# Fixed seeds/locales/timezones keep results comparable across versions.
name: Build and Test Anomaly detection
on:
  push:
    branches:
      - "*"
  pull_request:
    branches:
      - "*"

jobs:
  Build-ad:
    strategy:
      matrix:
        java: [17]
      # run every matrix entry to completion even if one fails
      fail-fast: false

    name: Build and Test Anomaly detection Plugin
    runs-on: ubuntu-latest

    steps:
      - name: Setup Java ${{ matrix.java }}
        uses: actions/setup-java@v1
        with:
          java-version: ${{ matrix.java }}

      # anomaly-detection
      - name: Checkout AD
        uses: actions/checkout@v2

      # -Dmodel-benchmark=true lifts the build.gradle filters that normally
      # exclude these benchmark tests; -Dtest.logs=true surfaces perf data in CI output
      - name: Build and Run Tests
        run: |
          ./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dmodel-benchmark=true
          ./gradlew integTest --tests "org.opensearch.ad.e2e.SingleStreamModelPerfIT" -Dtests.seed=60CDDB34427ACD0C -Dtests.security.manager=false -Dtests.locale=kab-DZ -Dtests.timezone=Asia/Hebron -Dtest.logs=true -Dmodel-benchmark=true
2 changes: 1 addition & 1 deletion .github/workflows/test_build_multi_platform.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ jobs:
./gradlew assemble
- name: Build and Run Tests
run: |
./gradlew build -Dtest.logs=true
./gradlew build
- name: Publish to Maven Local
run: |
./gradlew publishToMavenLocal
Expand Down
20 changes: 16 additions & 4 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ configurations.all {
if (it.state != Configuration.State.UNRESOLVED) return
resolutionStrategy {
force "joda-time:joda-time:${versions.joda}"
force "com.fasterxml.jackson.core:jackson-core:2.13.4"
force "com.fasterxml.jackson.core:jackson-core:2.14.0"
force "commons-logging:commons-logging:${versions.commonslogging}"
force "org.apache.httpcomponents:httpcore5:${versions.httpcore5}"
force "commons-codec:commons-codec:${versions.commonscodec}"
Expand Down Expand Up @@ -219,6 +219,12 @@ test {
}
include '**/*Tests.class'
systemProperty 'tests.security.manager', 'false'

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
}
}
}

task integTest(type: RestIntegTestTask) {
Expand Down Expand Up @@ -264,6 +270,12 @@ integTest {
}
}

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.e2e.SingleStreamModelPerfIT"
}
}

// The 'doFirst' delays till execution time.
doFirst {
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can
Expand Down Expand Up @@ -664,9 +676,9 @@ dependencies {
implementation 'software.amazon.randomcutforest:randomcutforest-core:3.0-rc3'

// force Jackson version to avoid version conflict issue
implementation "com.fasterxml.jackson.core:jackson-core:2.13.4"
implementation "com.fasterxml.jackson.core:jackson-databind:2.13.4.2"
implementation "com.fasterxml.jackson.core:jackson-annotations:2.13.4"
implementation "com.fasterxml.jackson.core:jackson-core:2.14.0"
implementation "com.fasterxml.jackson.core:jackson-databind:2.14.0"
implementation "com.fasterxml.jackson.core:jackson-annotations:2.14.0"

// used for serializing/deserializing rcf models.
implementation group: 'io.protostuff', name: 'protostuff-core', version: '1.8.0'
Expand Down
23 changes: 13 additions & 10 deletions src/test/java/org/opensearch/ad/ODFERestTestCase.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@

package org.opensearch.ad;

import static org.opensearch.client.RestClientBuilder.DEFAULT_MAX_CONN_PER_ROUTE;
import static org.opensearch.client.RestClientBuilder.DEFAULT_MAX_CONN_TOTAL;
import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_ENABLED;
import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_FILEPATH;
import static org.opensearch.commons.ConfigConstants.OPENSEARCH_SECURITY_SSL_HTTP_KEYSTORE_KEYPASSWORD;
Expand Down Expand Up @@ -186,21 +188,18 @@ protected static void configureHttpsClient(RestClientBuilder builder, Settings s
.ofNullable(System.getProperty("password"))
.orElseThrow(() -> new RuntimeException("password is missing"));
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider
.setCredentials(
new AuthScope(new HttpHost("localhost", 9200)),
new UsernamePasswordCredentials(userName, password.toCharArray())
);
final AuthScope anyScope = new AuthScope(null, -1);
credentialsProvider.setCredentials(anyScope, new UsernamePasswordCredentials(userName, password.toCharArray()));
try {
final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder
.create()
.setSslContext(SSLContextBuilder.create().loadTrustMaterial(null, (chains, authType) -> true).build())
// disable the certificate since our testing cluster just uses the default security configuration
.setHostnameVerifier(NoopHostnameVerifier.INSTANCE)
.setSslContext(SSLContextBuilder.create().loadTrustMaterial(null, (chains, authType) -> true).build())
.build();

final PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder
.create()
.setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE)
.setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL)
.setTlsStrategy(tlsStrategy)
.build();
return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider).setConnectionManager(connectionManager);
Expand All @@ -212,8 +211,12 @@ protected static void configureHttpsClient(RestClientBuilder builder, Settings s
final String socketTimeoutString = settings.get(CLIENT_SOCKET_TIMEOUT);
final TimeValue socketTimeout = TimeValue
.parseTimeValue(socketTimeoutString == null ? "60s" : socketTimeoutString, CLIENT_SOCKET_TIMEOUT);
builder
.setRequestConfigCallback(conf -> conf.setResponseTimeout(Timeout.ofMilliseconds(Math.toIntExact(socketTimeout.getMillis()))));
builder.setRequestConfigCallback(conf -> {
Timeout timeout = Timeout.ofMilliseconds(Math.toIntExact(socketTimeout.getMillis()));
conf.setConnectTimeout(timeout);
conf.setResponseTimeout(timeout);
return conf;
});
if (settings.hasValue(CLIENT_PATH_PREFIX)) {
builder.setPathPrefix(settings.get(CLIENT_PATH_PREFIX));
}
Expand Down
242 changes: 242 additions & 0 deletions src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,242 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/

package org.opensearch.ad.e2e;

import static org.opensearch.ad.TestHelpers.toHttpEntity;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.hc.core5.http.HttpHeaders;
import org.apache.hc.core5.http.message.BasicHeader;
import org.opensearch.ad.ODFERestTestCase;
import org.opensearch.ad.TestHelpers;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;
import org.opensearch.client.WarningsHandler;
import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;

import com.google.common.collect.ImmutableList;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

/**
 * Shared plumbing for synthetic-data model performance tests: loads a JSON
 * dataset from test resources, bulk-indexes it, creates a detector, and polls
 * detection results through the REST client.
 */
public class AbstractSyntheticDataTest extends ODFERestTestCase {
    /**
     * In real time AD, we mute a node for a detector if that node keeps returning
     * ResourceNotFoundException (5 times in a row). This is a problem for batch mode
     * testing as we issue a large amount of requests quickly. Due to the speed, we
     * won't be able to finish cold start before the ResourceNotFoundException mutes
     * a node. Since our test case has only one node, there is no other nodes to fall
     * back on. Here we disable such fault tolerance by setting max retries before
     * muting to a large number and the actual wait time during muting to 0.
     *
     * <p>NOTE(review): method name misspells "Tolerance"; kept for backward
     * compatibility with existing subclasses.
     *
     * @throws IOException when failing to create http request body
     */
    protected void disableResourceNotFoundFaultTolerence() throws IOException {
        XContentBuilder settingCommand = JsonXContent.contentBuilder();

        // { "persistent": { <retry setting>: 100000, <backoff setting>: 0 } }
        settingCommand.startObject();
        settingCommand.startObject("persistent");
        settingCommand.field(MAX_RETRY_FOR_UNRESPONSIVE_NODE.getKey(), 100_000);
        settingCommand.field(BACKOFF_MINUTES.getKey(), 0);
        settingCommand.endObject();
        settingCommand.endObject();
        Request request = new Request("PUT", "/_cluster/settings");
        request.setJsonEntity(Strings.toString(settingCommand));

        adminClient().performRequest(request);
    }

    /**
     * Loads a JSON-array dataset from the test classpath.
     *
     * @param datasetFileName resource path of the dataset (a JSON array of objects)
     * @return one JsonObject per array element, in file order
     * @throws Exception if the resource cannot be located or parsed
     */
    protected List<JsonObject> getData(String datasetFileName) throws Exception {
        // try-with-resources: the original implementation leaked this reader
        try (
            FileReader dataReader = new FileReader(
                new File(getClass().getResource(datasetFileName).toURI()),
                Charset.defaultCharset()
            )
        ) {
            JsonArray jsonArray = JsonParser.parseReader(dataReader).getAsJsonArray();
            List<JsonObject> list = new ArrayList<>(jsonArray.size());
            jsonArray.iterator().forEachRemaining(i -> list.add(i.getAsJsonObject()));
            return list;
        }
    }

    /**
     * Runs the detector once over [begin, end) via the preview/_run endpoint and
     * returns the parsed response body.
     *
     * <p>NOTE(review): this uses the legacy {@code _opendistro} route while
     * {@link #createDetector} uses {@code _plugins}; confirm both remain routable.
     *
     * @param detectorId id of the detector to execute
     * @param begin      period start (inclusive)
     * @param end        period end
     * @param client     REST client to issue the request with
     * @return response body as a map
     * @throws RuntimeException wrapping any request failure
     */
    protected Map<String, Object> getDetectionResult(String detectorId, Instant begin, Instant end, RestClient client) {
        try {
            Request request = new Request(
                "POST",
                String.format(Locale.ROOT, "/_opendistro/_anomaly_detection/detectors/%s/_run", detectorId)
            );
            request
                .setJsonEntity(
                    String.format(Locale.ROOT, "{ \"period_start\": %d, \"period_end\": %d }", begin.toEpochMilli(), end.toEpochMilli())
                );
            return entityAsMap(client.performRequest(request));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates the dataset index (with Feature1/Feature2 double fields, a date
     * timestamp, and an optional keyword category field), then bulk-indexes the
     * first {@code trainTestSplit} documents and waits for them to be searchable.
     *
     * @param datasetName    index name to create and write to
     * @param data           full dataset; only the first trainTestSplit docs are indexed
     * @param trainTestSplit number of leading documents to index (doc _id = position)
     * @param client         REST client to use
     * @param categoryField  keyword field for HCAD; null/empty for single-stream
     * @throws Exception on request failure or interruption
     */
    protected void bulkIndexTrainData(
        String datasetName,
        List<JsonObject> data,
        int trainTestSplit,
        RestClient client,
        String categoryField
    ) throws Exception {
        Request request = new Request("PUT", datasetName);
        String requestBody = null;
        if (Strings.isEmpty(categoryField)) {
            requestBody = "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
                + " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" } } } }";
        } else {
            requestBody = String
                .format(
                    Locale.ROOT,
                    "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
                        + " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" },"
                        + "\"%s\": { \"type\": \"keyword\"} } } }",
                    categoryField
                );
        }

        request.setJsonEntity(requestBody);
        // the test cluster uses default security config; deprecation warnings are expected
        setWarningHandler(request, false);
        client.performRequest(request);
        Thread.sleep(1_000);

        StringBuilder bulkRequestBuilder = new StringBuilder();
        for (int i = 0; i < trainTestSplit; i++) {
            bulkRequestBuilder.append("{ \"index\" : { \"_index\" : \"" + datasetName + "\", \"_id\" : \"" + i + "\" } }\n");
            bulkRequestBuilder.append(data.get(i).toString()).append("\n");
        }
        TestHelpers
            .makeRequest(
                client,
                "POST",
                "_bulk?refresh=true",
                null,
                toHttpEntity(bulkRequestBuilder.toString()),
                ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
            );
        Thread.sleep(1_000);
        waitAllSyncheticDataIngested(trainTestSplit, datasetName, client);
    }

    /**
     * Creates an anomaly detector over the given index with two sum features
     * (Feature1/Feature2), returning its id.
     *
     * @param datasetName       index the detector reads from
     * @param intervalMinutes   detection interval, in minutes
     * @param client            REST client to use
     * @param categoryField     HCAD category field; null/empty for single-stream
     * @param windowDelayInMins window delay, in minutes
     * @return the created detector's _id
     * @throws Exception on request failure or interruption
     */
    protected String createDetector(
        String datasetName,
        int intervalMinutes,
        RestClient client,
        String categoryField,
        long windowDelayInMins
    ) throws Exception {
        Request request = new Request("POST", "/_plugins/_anomaly_detection/detectors/");
        String requestBody = null;
        if (Strings.isEmpty(categoryField)) {
            requestBody = String
                .format(
                    Locale.ROOT,
                    "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
                        + ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
                        + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\""
                        + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": "
                        + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
                        + "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
                        + "\"schema_version\": 0 }",
                    datasetName,
                    intervalMinutes,
                    windowDelayInMins
                );
        } else {
            requestBody = String
                .format(
                    Locale.ROOT,
                    "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
                        + ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
                        + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\""
                        + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": "
                        + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
                        + "\"category_field\": [\"%s\"], "
                        + "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
                        + "\"schema_version\": 0 }",
                    datasetName,
                    intervalMinutes,
                    categoryField,
                    windowDelayInMins
                );
        }

        request.setJsonEntity(requestBody);
        Map<String, Object> response = entityAsMap(client.performRequest(request));
        String detectorId = (String) response.get("_id");
        // give the detector config index a moment to settle before use
        Thread.sleep(1_000);
        return detectorId;
    }

    /**
     * Polls until the last ingested document (doc _id == expectedSize - 1) is
     * searchable, forcing a refresh between attempts; gives up after the initial
     * attempt plus {@code maxWaitCycles} retries.
     *
     * <p>NOTE(review): method name misspells "Synthetic"; kept for backward
     * compatibility with existing subclasses.
     *
     * @param expectedSize number of documents expected in the index
     * @param datasetName  index to poll
     * @param client       REST client to use
     * @throws Exception on request failure or interruption
     */
    protected void waitAllSyncheticDataIngested(int expectedSize, String datasetName, RestClient client) throws Exception {
        int maxWaitCycles = 3;
        do {
            // fetch only the newest document by timestamp
            Request request = new Request("POST", String.format(Locale.ROOT, "/%s/_search", datasetName));
            request
                .setJsonEntity(
                    String
                        .format(
                            Locale.ROOT,
                            "{\"query\": {"
                                + "    \"match_all\": {}"
                                + "  },"
                                + "  \"size\": 1,"
                                + "  \"sort\": ["
                                + "    {"
                                + "      \"timestamp\": {"
                                + "        \"order\": \"desc\""
                                + "      }"
                                + "    }"
                                + "  ]}"
                        )
                );
            // Make sure all of the test data has been ingested
            // Expected response:
            // "_index":"synthetic","_type":"_doc","_id":"10080","_score":null,"_source":{"timestamp":"2019-11-08T00:00:00Z","Feature1":156.30028000000001,"Feature2":100.211205,"host":"host1"},"sort":[1573171200000]}
            Response response = client.performRequest(request);
            JsonObject json = JsonParser
                .parseReader(new InputStreamReader(response.getEntity().getContent(), Charset.defaultCharset()))
                .getAsJsonObject();
            JsonArray hits = json.getAsJsonObject("hits").getAsJsonArray("hits");
            // docs were indexed with _id = position, so the last doc's _id is expectedSize - 1
            if (hits != null
                && hits.size() == 1
                && expectedSize - 1 == hits.get(0).getAsJsonObject().getAsJsonPrimitive("_id").getAsLong()) {
                break;
            } else {
                request = new Request("POST", String.format(Locale.ROOT, "/%s/_refresh", datasetName));
                client.performRequest(request);
            }
            Thread.sleep(1_000);
            // fixed off-by-one: ">= 0" allowed two extra iterations beyond maxWaitCycles
        } while (maxWaitCycles-- > 0);
    }

    /**
     * Sets the warnings handler on the request: STRICT fails on any deprecation
     * warning, PERMISSIVE ignores them.
     *
     * @param request               request to configure
     * @param strictDeprecationMode true for STRICT, false for PERMISSIVE
     */
    protected void setWarningHandler(Request request, boolean strictDeprecationMode) {
        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        options.setWarningsHandler(strictDeprecationMode ? WarningsHandler.STRICT : WarningsHandler.PERMISSIVE);
        request.setOptions(options.build());
    }
}
Loading

0 comments on commit 1e01657

Please sign in to comment.