Skip to content

Commit

Permalink
AD model performance benchmark (#730)
Browse files Browse the repository at this point in the history
This PR adds an AD model performance benchmark so that we can compare model performance across versions.

Regarding benchmark data, we randomly generated synthetic data with known anomalies inserted throughout the signal. In particular, these are one-, two-, or four-dimensional data sets where each dimension is a noisy cosine wave. Anomalies are inserted into one dimension with 0.003 probability. Anomalies across each dimension can be independent or dependent. We have approximately 5000 observations per data set. Each data set is generated using the same random seed so the results are comparable across versions.

We also backported #600 so that we can capture the performance data in CI output.

Testing done:
* added unit tests to run the benchmark.

Signed-off-by: Kaituo Li <kaituo@amazon.com>
  • Loading branch information
kaituo authored Nov 18, 2022
1 parent 5f45bf1 commit d5d0436
Show file tree
Hide file tree
Showing 11 changed files with 1,161 additions and 609 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ jobs:
ls ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/$plugin_version
- name: Build and Run Tests
run: |
./gradlew build -Dtest.logs=true
./gradlew build
- name: Publish to Maven Local
run: |
./gradlew publishToMavenLocal
Expand Down
33 changes: 33 additions & 0 deletions .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: Run AD benchmark
on:
push:
branches:
- "*"
pull_request:
branches:
- "*"

jobs:
Build-ad:
strategy:
matrix:
java: [17]
fail-fast: false

name: Run Anomaly detection model performance benchmark
runs-on: ubuntu-latest

steps:
- name: Setup Java ${{ matrix.java }}
uses: actions/setup-java@v1
with:
java-version: ${{ matrix.java }}

# anomaly-detection
- name: Checkout AD
uses: actions/checkout@v2

- name: Build and Run Tests
run: |
./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dmodel-benchmark=true
./gradlew integTest --tests "org.opensearch.ad.e2e.SingleStreamModelPerfIT" -Dtests.seed=60CDDB34427ACD0C -Dtests.security.manager=false -Dtests.locale=kab-DZ -Dtests.timezone=Asia/Hebron -Dtest.logs=true -Dmodel-benchmark=true
2 changes: 2 additions & 0 deletions DEVELOPER_GUIDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,8 @@ Currently we just put RCF jar in lib as dependency. Plan to publish to Maven and
8. `./gradlew adBwcCluster#rollingUpgradeClusterTask -Dtests.security.manager=false` launches a cluster with three nodes of bwc version of OpenSearch with anomaly-detection and job-scheduler and tests backwards compatibility by performing rolling upgrade of each node with the current version of OpenSearch with anomaly-detection and job-scheduler.
9. `./gradlew adBwcCluster#fullRestartClusterTask -Dtests.security.manager=false` launches a cluster with three nodes of bwc version of OpenSearch with anomaly-detection and job-scheduler and tests backwards compatibility by performing a full restart on the cluster upgrading all the nodes with the current version of OpenSearch with anomaly-detection and job-scheduler.
10. `./gradlew bwcTestSuite -Dtests.security.manager=false` runs all the above bwc tests combined.
11. `./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dmodel-benchmark=true` launches HCAD model performance tests and logs the result in the standard output
12. `./gradlew integTest --tests "org.opensearch.ad.e2e.SingleStreamModelPerfIT" -Dtests.seed=60CDDB34427ACD0C -Dtests.security.manager=false -Dtests.locale=kab-DZ -Dtests.timezone=Asia/Hebron -Dtest.logs=true -Dmodel-benchmark=true` launches single stream AD model performance tests and logs the result in the standard output

When launching a cluster using one of the above commands logs are placed in `/build/cluster/run node0/opensearch-<version>/logs`. Though the logs are teed to the console, in practice it's best to check the actual log file.

Expand Down
12 changes: 12 additions & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,12 @@ test {
}
include '**/*Tests.class'
systemProperty 'tests.security.manager', 'false'

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
}
}
}

task integTest(type: RestIntegTestTask) {
Expand Down Expand Up @@ -254,6 +260,12 @@ integTest {
}
}

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.e2e.SingleStreamModelPerfIT"
}
}

// The 'doFirst' delays till execution time.
doFirst {
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can
Expand Down
242 changes: 242 additions & 0 deletions src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,242 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/

package org.opensearch.ad.e2e;

import static org.opensearch.ad.TestHelpers.toHttpEntity;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.http.HttpHeaders;
import org.apache.http.message.BasicHeader;
import org.opensearch.ad.ODFERestTestCase;
import org.opensearch.ad.TestHelpers;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;
import org.opensearch.client.WarningsHandler;
import org.opensearch.common.Strings;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.json.JsonXContent;

import com.google.common.collect.ImmutableList;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

/**
 * Shared plumbing for end-to-end model performance tests that run against a
 * synthetic data set: cluster tuning, data loading, bulk indexing, detector
 * creation, and polling until ingestion completes.
 *
 * NOTE(review): several method names ({@code disableResourceNotFoundFaultTolerence},
 * {@code waitAllSyncheticDataIngested}) contain spelling mistakes; they are kept
 * as-is because subclasses/callers reference them.
 */
public class AbstractSyntheticDataTest extends ODFERestTestCase {
    /**
     * In real time AD, we mute a node for a detector if that node keeps returning
     * ResourceNotFoundException (5 times in a row). This is a problem for batch mode
     * testing as we issue a large amount of requests quickly. Due to the speed, we
     * won't be able to finish cold start before the ResourceNotFoundException mutes
     * a node. Since our test case has only one node, there is no other nodes to fall
     * back on. Here we disable such fault tolerance by setting max retries before
     * muting to a large number and the actual wait time during muting to 0.
     *
     * @throws IOException when failing to create http request body
     */
    protected void disableResourceNotFoundFaultTolerence() throws IOException {
        XContentBuilder settingCommand = JsonXContent.contentBuilder();

        settingCommand.startObject();
        settingCommand.startObject("persistent");
        settingCommand.field(MAX_RETRY_FOR_UNRESPONSIVE_NODE.getKey(), 100_000);
        settingCommand.field(BACKOFF_MINUTES.getKey(), 0);
        settingCommand.endObject();
        settingCommand.endObject();
        Request request = new Request("PUT", "/_cluster/settings");
        request.setJsonEntity(Strings.toString(settingCommand));

        adminClient().performRequest(request);
    }

    /**
     * Loads a JSON-array data set file from the test classpath.
     *
     * @param datasetFileName classpath-relative resource name of the data set
     * @return one {@link JsonObject} per array element, in file order
     * @throws Exception if the resource cannot be located or parsed
     */
    protected List<JsonObject> getData(String datasetFileName) throws Exception {
        // try-with-resources: the reader was previously leaked (never closed)
        try (
            FileReader dataReader = new FileReader(
                new File(getClass().getResource(datasetFileName).toURI()),
                Charset.defaultCharset()
            )
        ) {
            JsonArray jsonArray = JsonParser.parseReader(dataReader).getAsJsonArray();
            List<JsonObject> list = new ArrayList<>(jsonArray.size());
            jsonArray.iterator().forEachRemaining(i -> list.add(i.getAsJsonObject()));
            return list;
        }
    }

    /**
     * Runs a detector once over [begin, end) via the legacy _opendistro _run API
     * and returns the parsed response body.
     *
     * @param detectorId detector to execute
     * @param begin      period start (inclusive)
     * @param end        period end
     * @param client     REST client to issue the request with
     * @return the response entity as a map
     */
    protected Map<String, Object> getDetectionResult(String detectorId, Instant begin, Instant end, RestClient client) {
        try {
            Request request = new Request(
                "POST",
                String.format(Locale.ROOT, "/_opendistro/_anomaly_detection/detectors/%s/_run", detectorId)
            );
            request
                .setJsonEntity(
                    String.format(Locale.ROOT, "{ \"period_start\": %d, \"period_end\": %d }", begin.toEpochMilli(), end.toEpochMilli())
                );
            return entityAsMap(client.performRequest(request));
        } catch (Exception e) {
            // callers treat any failure as fatal for the test; keep the cause attached
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates the index (two double features plus an optional keyword category
     * field) and bulk-indexes the first {@code trainTestSplit} documents of
     * {@code data}, then waits until the data is searchable.
     *
     * @param datasetName    index name
     * @param data           full data set; only the first trainTestSplit docs are indexed
     * @param trainTestSplit number of leading documents to index
     * @param client         REST client to issue requests with
     * @param categoryField  category field name, or null/empty for single-stream data
     * @throws Exception on request failure or interruption
     */
    protected void bulkIndexTrainData(
        String datasetName,
        List<JsonObject> data,
        int trainTestSplit,
        RestClient client,
        String categoryField
    ) throws Exception {
        Request request = new Request("PUT", datasetName);
        String requestBody = null;
        if (Strings.isEmpty(categoryField)) {
            requestBody = "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
                + " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" } } } }";
        } else {
            requestBody = String
                .format(
                    Locale.ROOT,
                    "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
                        + " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" },"
                        + "\"%s\": { \"type\": \"keyword\"} } } }",
                    categoryField
                );
        }

        request.setJsonEntity(requestBody);
        setWarningHandler(request, false);
        client.performRequest(request);
        // give the new index a moment to be ready before bulk indexing
        Thread.sleep(1_000);

        StringBuilder bulkRequestBuilder = new StringBuilder();
        for (int i = 0; i < trainTestSplit; i++) {
            // _id is the document's position so ingestion progress can be checked by id
            bulkRequestBuilder.append("{ \"index\" : { \"_index\" : \"" + datasetName + "\", \"_id\" : \"" + i + "\" } }\n");
            bulkRequestBuilder.append(data.get(i).toString()).append("\n");
        }
        TestHelpers
            .makeRequest(
                client,
                "POST",
                "_bulk?refresh=true",
                null,
                toHttpEntity(bulkRequestBuilder.toString()),
                ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
            );
        Thread.sleep(1_000);
        waitAllSyncheticDataIngested(trainTestSplit, datasetName, client);
    }

    /**
     * Creates an anomaly detector with two sum features over Feature1/Feature2,
     * optionally high-cardinality (category field).
     *
     * @param datasetName       source index name
     * @param intervalMinutes   detection interval in minutes
     * @param client            REST client to issue the request with
     * @param categoryField     category field name, or null/empty for single-stream
     * @param windowDelayInMins window delay in minutes
     * @return the created detector's id
     * @throws Exception on request failure or interruption
     */
    protected String createDetector(
        String datasetName,
        int intervalMinutes,
        RestClient client,
        String categoryField,
        long windowDelayInMins
    ) throws Exception {
        Request request = new Request("POST", "/_plugins/_anomaly_detection/detectors/");
        String requestBody = null;
        if (Strings.isEmpty(categoryField)) {
            requestBody = String
                .format(
                    Locale.ROOT,
                    "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
                        + ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
                        + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\""
                        + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": "
                        + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
                        + "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
                        + "\"schema_version\": 0 }",
                    datasetName,
                    intervalMinutes,
                    windowDelayInMins
                );
        } else {
            requestBody = String
                .format(
                    Locale.ROOT,
                    "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\""
                        + ", \"indices\": [\"%s\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": "
                        + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\""
                        + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": "
                        + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }, "
                        + "\"category_field\": [\"%s\"], "
                        + "\"window_delay\": { \"period\": {\"interval\": %d, \"unit\": \"MINUTES\"}},"
                        + "\"schema_version\": 0 }",
                    datasetName,
                    intervalMinutes,
                    categoryField,
                    windowDelayInMins
                );
        }

        request.setJsonEntity(requestBody);
        Map<String, Object> response = entityAsMap(client.performRequest(request));
        String detectorId = (String) response.get("_id");
        Thread.sleep(1_000);
        return detectorId;
    }

    /**
     * Polls (with _refresh in between) until the newest document's _id equals
     * {@code expectedSize - 1}, i.e. all bulk-indexed docs are searchable, or a
     * bounded number of wait cycles has elapsed. Gives up silently on timeout.
     *
     * @param expectedSize number of documents that were bulk indexed
     * @param datasetName  index name
     * @param client       REST client to issue requests with
     * @throws Exception on request failure or interruption
     */
    protected void waitAllSyncheticDataIngested(int expectedSize, String datasetName, RestClient client) throws Exception {
        int maxWaitCycles = 3;
        do {
            Request request = new Request("POST", String.format(Locale.ROOT, "/%s/_search", datasetName));
            request
                .setJsonEntity(
                    String
                        .format(
                            Locale.ROOT,
                            "{\"query\": {"
                                + " \"match_all\": {}"
                                + " },"
                                + " \"size\": 1,"
                                + " \"sort\": ["
                                + " {"
                                + " \"timestamp\": {"
                                + " \"order\": \"desc\""
                                + " }"
                                + " }"
                                + " ]}"
                        )
                );
            // Make sure all of the test data has been ingested
            // Expected response:
            // "_index":"synthetic","_type":"_doc","_id":"10080","_score":null,"_source":{"timestamp":"2019-11-08T00:00:00Z","Feature1":156.30028000000001,"Feature2":100.211205,"host":"host1"},"sort":[1573171200000]}
            Response response = client.performRequest(request);
            JsonObject json;
            // try-with-resources: the reader was previously leaked (never closed)
            try (InputStreamReader responseReader = new InputStreamReader(response.getEntity().getContent(), Charset.defaultCharset())) {
                json = JsonParser.parseReader(responseReader).getAsJsonObject();
            }
            JsonArray hits = json.getAsJsonObject("hits").getAsJsonArray("hits");
            if (hits != null
                && hits.size() == 1
                && expectedSize - 1 == hits.get(0).getAsJsonObject().getAsJsonPrimitive("_id").getAsLong()) {
                break;
            } else {
                // not all docs visible yet: force a refresh and try again
                request = new Request("POST", String.format(Locale.ROOT, "/%s/_refresh", datasetName));
                client.performRequest(request);
            }
            Thread.sleep(1_000);
        } while (maxWaitCycles-- >= 0);
    }

    /**
     * Configures the request to either reject (STRICT) or ignore (PERMISSIVE)
     * deprecation warnings returned by the server.
     *
     * @param request               request to configure
     * @param strictDeprecationMode true for STRICT, false for PERMISSIVE
     */
    protected void setWarningHandler(Request request, boolean strictDeprecationMode) {
        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        options.setWarningsHandler(strictDeprecationMode ? WarningsHandler.STRICT : WarningsHandler.PERMISSIVE);
        request.setOptions(options.build());
    }
}
Loading

0 comments on commit d5d0436

Please sign in to comment.