AD model performance benchmark
This PR adds an HCAD (high-cardinality anomaly detection) model performance benchmark so that we can compare model performance across versions.

For benchmark data, we randomly generated synthetic data with known anomalies inserted throughout the signal. The data sets are one-, two-, or four-dimensional, and each dimension is a noisy cosine wave. Anomalies are injected into one dimension with probability 0.003, and anomalies across dimensions can be either independent or dependent. Each data set contains approximately 5,000 observations. The data sets are generated with a fixed random seed, so results are comparable across versions.
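
The signal shape we describe above can be sketched as follows. This is only an illustration of the description, not the PR's actual generator; constants such as the period, noise scale, and spike size are made up:

```java
import java.util.Random;

// Illustrative sketch of the benchmark signal: a noisy cosine wave with rare
// anomalies injected at probability 0.003. All constants here are assumptions;
// the actual generator in the test code may differ.
public class SyntheticDataSketch {

    public static double[] generate(int size, long seed) {
        Random random = new Random(seed); // fixed seed keeps runs comparable across versions
        double[] data = new double[size];
        for (int i = 0; i < size; i++) {
            double base = 100 * Math.cos(2 * Math.PI * i / 50.0);  // underlying cosine signal
            double noise = 5 * random.nextGaussian();              // additive Gaussian noise
            double anomaly = random.nextDouble() < 0.003 ? 50 : 0; // injected spike
            data[i] = base + noise + anomaly;
        }
        return data;
    }
}
```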

We also backported opensearch-project#600 so that we can capture the performance data in CI output.

Testing done:
* added unit tests to run the benchmark.

Signed-off-by: Kaituo Li <kaituo@amazon.com>
kaituo committed Nov 15, 2022
1 parent df4c94e commit 21e8f42
Showing 9 changed files with 1,153 additions and 612 deletions.
33 changes: 33 additions & 0 deletions .github/workflows/benchmark.yml
@@ -0,0 +1,33 @@
name: Run AD benchmark
on:
  push:
    branches:
      - "*"
  pull_request:
    branches:
      - "*"

jobs:
  Build-ad:
    strategy:
      matrix:
        java: [14]
      fail-fast: false

    name: Run Anomaly detection model performance benchmark
    runs-on: ubuntu-latest

    steps:
      - name: Setup Java ${{ matrix.java }}
        uses: actions/setup-java@v1
        with:
          java-version: ${{ matrix.java }}

      # anomaly-detection
      - name: Checkout AD
        uses: actions/checkout@v2

      - name: Build and Run Tests
        run: |
          ./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dmodel-benchmark=true
          ./gradlew integTest --tests "org.opensearch.ad.e2e.SingleStreamModelPerfIT" -Dtests.seed=60CDDB34427ACD0C -Dtests.security.manager=false -Dtests.locale=kab-DZ -Dtests.timezone=Asia/Hebron -Dtest.logs=true -Dmodel-benchmark=true
33 changes: 33 additions & 0 deletions build.gradle
@@ -36,6 +36,10 @@ buildscript {
'opensearch-anomaly-detection-1.1.0.0.zip'
bwcOpenSearchJSDownload = 'https://ci.opensearch.org/ci/dbc/bundle-build/1.1.0/20210930/linux/x64/builds/opensearch/plugins/' +
'opensearch-job-scheduler-1.1.0.0.zip'
// By default, the Gradle build won't print logs during tests unless there is a failure.
// It is useful to record intermediate information like prediction precision and recall,
// so this option turns on log printing during tests.
printLogs = "true" == System.getProperty("test.logs", "false")
}

repositories {
@@ -175,6 +179,12 @@ test {
}
include '**/*Tests.class'
systemProperty 'tests.security.manager', 'false'

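// Model benchmark tests are opt-in in both the test and integTest tasks:
// they run only when the build is invoked with -Dmodel-benchmark=true,
// as the benchmark workflow does.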
if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
}
}
}

task integTest(type: RestIntegTestTask) {
@@ -220,6 +230,12 @@ integTest {
}
}

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.e2e.SingleStreamModelPerfIT"
}
}

// The 'doFirst' delays till execution time.
doFirst {
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can
@@ -240,6 +256,12 @@ integTest {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

if (printLogs) {
testLogging {
showStandardStreams = true
outputs.upToDateWhen {false}
}
}
}

testClusters.integTest {
@@ -670,6 +692,7 @@ dependencies {
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.7.2'
testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.7.2'
testCompileOnly 'junit:junit:4.13.2'
implementation group: 'org.javassist', name: 'javassist', version:'3.28.0-GA'
}

compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
@@ -775,3 +798,13 @@ task updateVersion {
ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true)
}
}

// Show test results so that we can record information like precision/recall results of correctness testing.
if (printLogs) {
test {
testLogging {
showStandardStreams = true
outputs.upToDateWhen {false}
}
}
}
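
With -Dtest.logs=true, anything a test writes to standard output is surfaced in the Gradle build output and therefore captured in CI logs. A minimal sketch of the kind of metric logging this enables (the class and numbers below are illustrative, not taken from this PR):

```java
import org.junit.Test;

// Illustrative only: a test that prints benchmark metrics to standard output.
// With -Dtest.logs=true, showStandardStreams makes these lines visible in the
// Gradle (and therefore CI) output.
public class MetricLoggingExampleTests {

    @Test
    public void logPrecisionRecall() {
        int truePositives = 42, falsePositives = 3, falseNegatives = 7; // hypothetical counts
        double precision = (double) truePositives / (truePositives + falsePositives);
        double recall = (double) truePositives / (truePositives + falseNegatives);
        System.out.println("precision: " + precision + ", recall: " + recall);
    }
}
```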
258 changes: 258 additions & 0 deletions src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java
@@ -0,0 +1,258 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/

package org.opensearch.ad.e2e;

import static org.opensearch.ad.TestHelpers.toHttpEntity;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.http.HttpHeaders;
import org.apache.http.message.BasicHeader;
import org.opensearch.ad.ODFERestTestCase;
import org.opensearch.ad.TestHelpers;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;
import org.opensearch.client.WarningsHandler;
import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;

import com.google.common.collect.ImmutableList;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class AbstractSyntheticDataTest extends ODFERestTestCase {

/**
* In real-time AD, we mute a node for a detector if that node keeps returning
* ResourceNotFoundException (5 times in a row). This is a problem for batch-mode
* testing, as we issue a large number of requests quickly. Due to the speed, we
* won't be able to finish cold start before the ResourceNotFoundException mutes
* a node. Since our test case has only one node, there are no other nodes to fall
* back on. Here we disable such fault tolerance by setting the max retries before
* muting to a large number and the actual wait time during muting to 0.
*
* @throws IOException when failing to create http request body
*/
protected void disableResourceNotFoundFaultTolerence() throws IOException {
XContentBuilder settingCommand = JsonXContent.contentBuilder();

settingCommand.startObject();
settingCommand.startObject("persistent");
settingCommand.field(MAX_RETRY_FOR_UNRESPONSIVE_NODE.getKey(), 100_000);
settingCommand.field(BACKOFF_MINUTES.getKey(), 0);
settingCommand.endObject();
settingCommand.endObject();
Request request = new Request("PUT", "/_cluster/settings");
request.setJsonEntity(Strings.toString(settingCommand));

adminClient().performRequest(request);
}

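/**
* Load the named synthetic data set from test resources and return its
* documents as a list of JSON objects.
*/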
protected List<JsonObject> getData(String datasetFileName) throws Exception {
JsonArray jsonArray = new JsonParser()
.parse(new FileReader(new File(getClass().getResource(datasetFileName).toURI())))
.getAsJsonArray();
List<JsonObject> list = new ArrayList<>(jsonArray.size());
jsonArray.iterator().forEachRemaining(i -> list.add(i.getAsJsonObject()));
return list;
}

protected Map<String, Object> getDetectionResult(String detectorId, Instant begin, Instant end, RestClient client) {
try {
Request request = new Request("POST", String.format("/_opendistro/_anomaly_detection/detectors/%s/_run", detectorId));
request
.setJsonEntity(
String.format(Locale.ROOT, "{ \"period_start\": %d, \"period_end\": %d }", begin.toEpochMilli(), end.toEpochMilli())
);
return entityAsMap(client.performRequest(request));
} catch (Exception e) {
throw new RuntimeException(e);
}
}

protected void bulkIndexTrainData(
String datasetName,
List<JsonObject> data,
int trainTestSplit,
RestClient client,
String categoryField
) throws Exception {
Request request = new Request("PUT", datasetName);
String requestBody = null;
if (Strings.isEmpty(categoryField)) {
requestBody = "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
+ " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" } } } }";
} else {
requestBody = String
.format(
Locale.ROOT,
"{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
+ " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" },"
+ "\"%s\": { \"type\": \"keyword\"} } } }",
categoryField
);
}

request.setJsonEntity(requestBody);
setWarningHandler(request, false);
client.performRequest(request);
Thread.sleep(1_000);

StringBuilder bulkRequestBuilder = new StringBuilder();
for (int i = 0; i < trainTestSplit; i++) {
bulkRequestBuilder.append("{ \"index\" : { \"_index\" : \"" + datasetName + "\", \"_id\" : \"" + i + "\" } }\n");
bulkRequestBuilder.append(data.get(i).toString()).append("\n");
}
Response response = TestHelpers
.makeRequest(
client,
"POST",
"_bulk?refresh=true",
null,
toHttpEntity(bulkRequestBuilder.toString()),
ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
);
JsonObject json = new JsonParser().parse(new InputStreamReader(response.getEntity().getContent())).getAsJsonObject();
Thread.sleep(5_000);
waitAllSyncheticDataIngested(trainTestSplit, datasetName, client);
}

protected void bulkIndexTestData(List<JsonObject> data, String datasetName, int trainTestSplit, RestClient client) throws Exception {
StringBuilder bulkRequestBuilder = new StringBuilder();
for (int i = trainTestSplit; i < data.size(); i++) {
bulkRequestBuilder.append("{ \"index\" : { \"_index\" : \"" + datasetName + "\", \"_id\" : \"" + i + "\" } }\n");
bulkRequestBuilder.append(data.get(i).toString()).append("\n");
}
TestHelpers
.makeRequest(
client,
"POST",
"_bulk?refresh=true",
null,
toHttpEntity(bulkRequestBuilder.toString()),
ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
);
Thread.sleep(1_000);
waitAllSyncheticDataIngested(data.size(), datasetName, client);
}

protected String createDetector(
String datasetName,
int intervalMinutes,
RestClient client,
String categoryField,
long windowDelayInMins
) throws Exception {
Request request = new Request("POST", "/_plugins/_anomaly_detection/detectors/");
String requestBody = null;
if (Strings.isEmpty(categoryField)) {
requestBody = String
.format(
Locale.ROOT,
"{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
+ ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
+ "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\""
+ ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": "
+ "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
+ "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
+ "\"schema_version\": 0 }",
datasetName,
intervalMinutes,
windowDelayInMins
);
} else {
requestBody = String
.format(
Locale.ROOT,
"{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
+ ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
+ "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\""
+ ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": "
+ "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
+ "\"category_field\": [\"%s\"], "
+ "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
+ "\"schema_version\": 0 }",
datasetName,
intervalMinutes,
categoryField,
windowDelayInMins
);
}

request.setJsonEntity(requestBody);
Map<String, Object> response = entityAsMap(client.performRequest(request));
String detectorId = (String) response.get("_id");
Thread.sleep(1_000);
return detectorId;
}

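/**
* Poll the dataset index until the document with the highest expected _id is
* searchable, refreshing the index between attempts and giving up after a
* bounded number of wait cycles.
*/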
protected void waitAllSyncheticDataIngested(int expectedSize, String datasetName, RestClient client) throws Exception {
int maxWaitCycles = 5;
do {
Request request = new Request("POST", String.format(Locale.ROOT, "/%s/_search", datasetName));
request
.setJsonEntity(
String
.format(
Locale.ROOT,
"{\"query\": {"
+ " \"match_all\": {}"
+ " },"
+ " \"size\": 1,"
+ " \"sort\": ["
+ " {"
+ " \"timestamp\": {"
+ " \"order\": \"desc\""
+ " }"
+ " }"
+ " ]}"
)
);
// Make sure all of the test data has been ingested
// Expected response:
// "_index":"synthetic","_type":"_doc","_id":"10080","_score":null,"_source":{"timestamp":"2019-11-08T00:00:00Z","Feature1":156.30028000000001,"Feature2":100.211205,"host":"host1"},"sort":[1573171200000]}
Response response = client.performRequest(request);
JsonObject json = new JsonParser().parse(new InputStreamReader(response.getEntity().getContent())).getAsJsonObject();
JsonArray hits = json.getAsJsonObject("hits").getAsJsonArray("hits");
if (hits != null
&& hits.size() == 1
&& expectedSize - 1 == hits.get(0).getAsJsonObject().getAsJsonPrimitive("_id").getAsLong()) {
break;
} else {
request = new Request("POST", String.format(Locale.ROOT, "/%s/_refresh", datasetName));
client.performRequest(request);
}
Thread.sleep(5_000);
} while (maxWaitCycles-- >= 0);
}

protected void setWarningHandler(Request request, boolean strictDeprecationMode) {
RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
options.setWarningsHandler(strictDeprecationMode ? WarningsHandler.STRICT : WarningsHandler.PERMISSIVE);
request.setOptions(options.build());
}

}
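
Concrete benchmark tests build on these helpers: index the training split, create a detector, replay the test split through getDetectionResult, and score the returned anomaly grades against the known injected anomalies. A hedged sketch of that flow follows; the class name, dataset file, split point, and the "anomalyGrade" result field are assumptions for illustration, not taken from this PR:

```java
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;
import java.util.Map;

import org.opensearch.client.RestClient;

import com.google.gson.JsonObject;

// Illustrative flow only: the real benchmark tests (e.g. SingleStreamModelPerfIT)
// differ in dataset names, split points, and scoring details.
public class ExamplePerfIT extends AbstractSyntheticDataTest {

    public void testDetectorPerformance() throws Exception {
        RestClient client = client();
        List<JsonObject> data = getData("synthetic.json"); // hypothetical dataset resource
        int trainTestSplit = 1500;                         // hypothetical split point
        int intervalMinutes = 10;

        bulkIndexTrainData("synthetic", data, trainTestSplit, client, null);
        String detectorId = createDetector("synthetic", intervalMinutes, client, null, 0);
        bulkIndexTestData(data, "synthetic", trainTestSplit, client);

        int positives = 0;
        for (int i = trainTestSplit; i < data.size(); i++) {
            Instant end = Instant.parse(data.get(i).get("timestamp").getAsString());
            Map<String, Object> result = getDetectionResult(
                detectorId, end.minus(intervalMinutes, ChronoUnit.MINUTES), end, client);
            // "anomalyGrade" as a field of the _run response is an assumption here.
            if (((Number) result.get("anomalyGrade")).doubleValue() > 0) {
                positives++;
            }
        }
        // Compare `positives` against the known injected anomaly positions to
        // report precision and recall in the test output.
    }
}
```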
