AD model performance benchmark
This PR adds an HCAD model performance benchmark so that we can compare model performance across versions.

For benchmark data, we randomly generate synthetic data with known anomalies inserted throughout the signal. In particular, the data sets are one-, two-, and four-dimensional, and each dimension is a noisy cosine wave. Anomalies are inserted into one dimension with probability 0.003, and anomalies across dimensions can be either independent or dependent. There are approximately 5000 observations per data set. Each data set is generated with the same random seed so the results are comparable across versions.
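
Conceptually, each dimension could be produced along the following lines. This is a minimal sketch, not the actual test helper; the wave period, noise level, anomaly magnitude, and the class/method names are illustrative assumptions.

```java
import java.util.Random;

public class CosineDataSketch {

    /**
     * One dimension of synthetic benchmark data: a noisy cosine wave with
     * anomalies injected at the given probability, using a fixed seed so
     * repeated runs produce the same series.
     */
    public static double[] noisyCosineWithAnomalies(int size, double anomalyProbability, long seed) {
        Random random = new Random(seed);
        double[] data = new double[size];
        for (int i = 0; i < size; i++) {
            double base = Math.cos(2 * Math.PI * i / 50.0); // cosine signal, period of 50 points (assumed)
            double noise = random.nextGaussian() * 0.1;     // Gaussian noise (assumed scale)
            double value = base + noise;
            if (random.nextDouble() < anomalyProbability) {
                value += 5.0;                               // injected anomaly spike (assumed magnitude)
            }
            data[i] = value;
        }
        return data;
    }

    public static void main(String[] args) {
        // ~5000 observations per data set, anomalies with probability 0.003, fixed seed
        double[] series = noisyCosineWithAnomalies(5000, 0.003, 42L);
        System.out.println("Generated " + series.length + " observations");
    }
}
```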

We also backported opensearch-project#600 so that we can capture the performance data in CI output.

Testing done:
* added unit tests to run the benchmark; the kind of precision/recall check involved is sketched below.

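The checks mirror the verifyTestResults assertions in SingleStreamModelPerfIT further down: precision and recall are computed against the known anomaly labels, logged so they show up in CI output, and asserted against minimum thresholds. A minimal sketch of that kind of check (the class and variable names here are illustrative, not the actual test code):

```java
import static org.junit.Assert.assertTrue;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Illustrative sketch only; the real checks live in the benchmark tests themselves.
public class ModelPerfCheckSketch {

    private static final Logger LOG = LogManager.getLogger(ModelPerfCheckSketch.class);

    static void verifyResults(double precision, double recall, double minPrecision, double minRecall) {
        // Logged so the numbers are captured in CI output when tests run with -Dtest.logs=true.
        LOG.info("Precision: {}, Window recall: {}", precision, recall);
        assertTrue(precision >= minPrecision);
        assertTrue(recall >= minRecall);
    }
}
```
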
Signed-off-by: Kaituo Li <kaituo@amazon.com>
kaituo committed Nov 15, 2022
1 parent 2ebaa08 commit ecd920c
Showing 9 changed files with 789 additions and 217 deletions.
32 changes: 32 additions & 0 deletions .github/workflows/benchmark.yml
@@ -0,0 +1,32 @@
name: Run AD benchmark
on:
push:
branches:
- "*"
pull_request:
branches:
- "*"

jobs:
Build-ad:
strategy:
matrix:
java: [14]

name: Run Anomaly detection model performance benchmark
runs-on: ubuntu-latest

steps:
- name: Setup Java ${{ matrix.java }}
uses: actions/setup-java@v1
with:
java-version: ${{ matrix.java }}

# anomaly-detection
- name: Checkout AD
uses: actions/checkout@v2

- name: Build and Run Tests
run: |
./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dmodel-benchmark=true
./gradlew integTest --tests "org.opensearch.ad.e2e.SingleStreamModelPerfIT" -Dtests.seed=60CDDB34427ACD0C -Dtests.security.manager=false -Dtests.locale=kab-DZ -Dtests.timezone=Asia/Hebron -Dtest.logs=true -Dmodel-benchmark=true
1 change: 1 addition & 0 deletions DEVELOPER_GUIDE.md
@@ -45,6 +45,7 @@ Currently we just put RCF jar in lib as dependency. Plan to publish to Maven and
8. `./gradlew adBwcCluster#rollingUpgradeClusterTask -Dtests.security.manager=false` launches a cluster with three nodes of bwc version of OpenSearch with anomaly-detection and job-scheduler and tests backwards compatibility by performing rolling upgrade of each node with the current version of OpenSearch with anomaly-detection and job-scheduler.
9. `./gradlew adBwcCluster#fullRestartClusterTask -Dtests.security.manager=false` launches a cluster with three nodes of bwc version of OpenSearch with anomaly-detection and job-scheduler and tests backwards compatibility by performing a full restart on the cluster upgrading all the nodes with the current version of OpenSearch with anomaly-detection and job-scheduler.
10. `./gradlew bwcTestSuite -Dtests.security.manager=false` runs all the above bwc tests combined.
11. `./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dmodel-benchmark=true` launches the AD model performance tests and logs the results to standard output.

When launching a cluster using one of the above commands, logs are placed in `/build/cluster/run node0/opensearch-<version>/logs`. Though the logs are teed to the console, in practice it's best to check the actual log file.

32 changes: 32 additions & 0 deletions build.gradle
@@ -21,6 +21,10 @@ buildscript {
opensearch_build = opensearch_version.replaceAll(/(\.\d)([^\d]*)$/, '$1.0$2')
common_utils_version = System.getProperty("common_utils.version", opensearch_build)
job_scheduler_version = System.getProperty("job_scheduler.version", opensearch_build)
// The gradle build won't print logs during tests by default unless there is a failure.
// It is useful to record intermediate information like prediction precision and recall.
// This option turns on log printing during tests.
printLogs = "true" == System.getProperty("test.logs", "false")
}

repositories {
@@ -154,6 +158,12 @@ opensearch_tmp_dir.mkdirs()
test {
include '**/*Tests.class'
systemProperty 'tests.security.manager', 'false'

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
}
}
}

task integTest(type: RestIntegTestTask) {
@@ -192,6 +202,12 @@ integTest {
}
}

if (System.getProperty("model-benchmark") == null || System.getProperty("model-benchmark") == "false") {
filter {
excludeTestsMatching "org.opensearch.ad.e2e.SingleStreamModelPerfIT"
}
}

// The 'doFirst' delays till execution time.
doFirst {
// Tell the test JVM if the cluster JVM is running under a debugger so that tests can
@@ -212,6 +228,12 @@ integTest {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

if (printLogs) {
testLogging {
showStandardStreams = true
outputs.upToDateWhen {false}
}
}
}

testClusters.integTest {
@@ -703,3 +725,13 @@ validateNebulaPom.enabled = false
tasks.withType(licenseHeaders.class) {
additionalLicense 'AL ', 'Apache', 'Licensed under the Apache License, Version 2.0 (the "License")'
}

// Show test results so that we can record information like precision/recall results of correctness testing.
if (printLogs) {
test {
testLogging {
showStandardStreams = true
outputs.upToDateWhen {false}
}
}
}
src/test/java/org/opensearch/ad/e2e/{DetectionResultEvalutationIT.java → SingleStreamModelPerfIT.java}
@@ -49,8 +49,8 @@
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class DetectionResultEvalutationIT extends ODFERestTestCase {
protected static final Logger LOG = (Logger) LogManager.getLogger(DetectionResultEvalutationIT.class);
public class SingleStreamModelPerfIT extends ODFERestTestCase {
protected static final Logger LOG = (Logger) LogManager.getLogger(SingleStreamModelPerfIT.class);

// TODO: fix flaky test, sometimes this assert will fail "assertTrue(precision >= minPrecision);"
public void testDataset() throws Exception {
@@ -108,6 +108,7 @@ private void verifyTestResults(
assertTrue(recall >= minRecall);

assertTrue(errors <= maxError);
LOG.info("Precision: {}, Window recall: {}", precision, recall);
}

private int isAnomaly(Instant time, List<Entry<Instant, Instant>> labels) {
247 changes: 247 additions & 0 deletions src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java
@@ -0,0 +1,247 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/

package org.opensearch.ad.ml;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE;

import java.time.Clock;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.opensearch.Version;
import org.opensearch.action.ActionListener;
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.get.GetResponse;
import org.opensearch.ad.AbstractADTest;
import org.opensearch.ad.AnomalyDetectorPlugin;
import org.opensearch.ad.MemoryTracker;
import org.opensearch.ad.NodeStateManager;
import org.opensearch.ad.TestHelpers;
import org.opensearch.ad.dataprocessor.IntegerSensitiveSingleFeatureLinearUniformInterpolator;
import org.opensearch.ad.dataprocessor.Interpolator;
import org.opensearch.ad.dataprocessor.LinearUniformInterpolator;
import org.opensearch.ad.dataprocessor.SingleFeatureLinearUniformInterpolator;
import org.opensearch.ad.feature.FeatureManager;
import org.opensearch.ad.feature.SearchFeatureDao;
import org.opensearch.ad.model.AnomalyDetector;
import org.opensearch.ad.model.Entity;
import org.opensearch.ad.model.IntervalTimeConfiguration;
import org.opensearch.ad.ratelimit.CheckpointWriteWorker;
import org.opensearch.ad.settings.AnomalyDetectorSettings;
import org.opensearch.ad.util.ClientUtil;
import org.opensearch.client.Client;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodeRole;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.test.ClusterServiceUtils;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.threadpool.ThreadPool;

import com.google.common.collect.ImmutableList;

public class AbstractCosineDataTest extends AbstractADTest {
protected int numMinSamples;
protected String modelId;
protected String entityName;
protected String detectorId;
protected ModelState<EntityModel> modelState;
protected Clock clock;
protected float priority;
protected EntityColdStarter entityColdStarter;
protected NodeStateManager stateManager;
protected SearchFeatureDao searchFeatureDao;
protected Interpolator interpolator;
protected CheckpointDao checkpoint;
protected FeatureManager featureManager;
protected Settings settings;
protected ThreadPool threadPool;
protected AtomicBoolean released;
protected Runnable releaseSemaphore;
protected ActionListener<Void> listener;
protected CountDownLatch inProgressLatch;
protected CheckpointWriteWorker checkpointWriteQueue;
protected Entity entity;
protected AnomalyDetector detector;
protected long rcfSeed;
protected ClientUtil clientUtil;
protected ModelManager modelManager;

@SuppressWarnings("unchecked")
@Override
public void setUp() throws Exception {
super.setUp();
numMinSamples = AnomalyDetectorSettings.NUM_MIN_SAMPLES;

clock = mock(Clock.class);
when(clock.instant()).thenReturn(Instant.now());

threadPool = mock(ThreadPool.class);
setUpADThreadPool(threadPool);

settings = Settings.EMPTY;

Client client = mock(Client.class);
clientUtil = mock(ClientUtil.class);

detector = TestHelpers.AnomalyDetectorBuilder
.newInstance()
.setDetectionInterval(new IntervalTimeConfiguration(1, ChronoUnit.MINUTES))
.setCategoryFields(ImmutableList.of(randomAlphaOfLength(5)))
.build();
when(clock.millis()).thenReturn(1602401500000L);
doAnswer(invocation -> {
GetRequest request = invocation.getArgument(0);
ActionListener<GetResponse> listener = invocation.getArgument(2);

listener.onResponse(TestHelpers.createGetResponse(detector, detectorId, AnomalyDetector.ANOMALY_DETECTORS_INDEX));

return null;
}).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class));

Set<Setting<?>> nodestateSetting = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
nodestateSetting.add(MAX_RETRY_FOR_UNRESPONSIVE_NODE);
nodestateSetting.add(BACKOFF_MINUTES);
ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, nodestateSetting);

DiscoveryNode discoveryNode = new DiscoveryNode(
"node1",
OpenSearchTestCase.buildNewFakeTransportAddress(),
Collections.emptyMap(),
DiscoveryNodeRole.BUILT_IN_ROLES,
Version.CURRENT
);

ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, discoveryNode, clusterSettings);

stateManager = new NodeStateManager(
client,
xContentRegistry(),
settings,
clientUtil,
clock,
AnomalyDetectorSettings.HOURLY_MAINTENANCE,
clusterService
);

SingleFeatureLinearUniformInterpolator singleFeatureLinearUniformInterpolator =
new IntegerSensitiveSingleFeatureLinearUniformInterpolator();
interpolator = new LinearUniformInterpolator(singleFeatureLinearUniformInterpolator);

searchFeatureDao = mock(SearchFeatureDao.class);
checkpoint = mock(CheckpointDao.class);

featureManager = new FeatureManager(
searchFeatureDao,
interpolator,
clock,
AnomalyDetectorSettings.MAX_TRAIN_SAMPLE,
AnomalyDetectorSettings.MAX_SAMPLE_STRIDE,
AnomalyDetectorSettings.TRAIN_SAMPLE_TIME_RANGE_IN_HOURS,
AnomalyDetectorSettings.MIN_TRAIN_SAMPLES,
AnomalyDetectorSettings.MAX_SHINGLE_PROPORTION_MISSING,
AnomalyDetectorSettings.MAX_IMPUTATION_NEIGHBOR_DISTANCE,
AnomalyDetectorSettings.PREVIEW_SAMPLE_RATE,
AnomalyDetectorSettings.MAX_PREVIEW_SAMPLES,
AnomalyDetectorSettings.HOURLY_MAINTENANCE,
threadPool,
AnomalyDetectorPlugin.AD_THREAD_POOL_NAME
);

checkpointWriteQueue = mock(CheckpointWriteWorker.class);

rcfSeed = 2051L;
entityColdStarter = new EntityColdStarter(
clock,
threadPool,
stateManager,
AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
AnomalyDetectorSettings.NUM_TREES,
AnomalyDetectorSettings.TIME_DECAY,
numMinSamples,
AnomalyDetectorSettings.MAX_SAMPLE_STRIDE,
AnomalyDetectorSettings.MAX_TRAIN_SAMPLE,
interpolator,
searchFeatureDao,
AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE,
featureManager,
settings,
AnomalyDetectorSettings.HOURLY_MAINTENANCE,
checkpointWriteQueue,
rcfSeed,
AnomalyDetectorSettings.MAX_COLD_START_ROUNDS
);

detectorId = "123";
modelId = "123_entity_abc";
entityName = "abc";
priority = 0.3f;
entity = Entity.createSingleAttributeEntity("field", entityName);

released = new AtomicBoolean();

inProgressLatch = new CountDownLatch(1);
releaseSemaphore = () -> {
released.set(true);
inProgressLatch.countDown();
};
listener = ActionListener.wrap(releaseSemaphore);

modelManager = new ModelManager(
mock(CheckpointDao.class),
mock(Clock.class),
AnomalyDetectorSettings.NUM_TREES,
AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
AnomalyDetectorSettings.TIME_DECAY,
AnomalyDetectorSettings.NUM_MIN_SAMPLES,
AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE,
AnomalyDetectorSettings.MIN_PREVIEW_SIZE,
AnomalyDetectorSettings.HOURLY_MAINTENANCE,
AnomalyDetectorSettings.HOURLY_MAINTENANCE,
entityColdStarter,
mock(FeatureManager.class),
mock(MemoryTracker.class)
);
}

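// Binary search over the sorted timestamps array: returns the index of target if present,
// otherwise the insertion point that keeps the array sorted.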
public int searchInsert(long[] timestamps, long target) {
int pivot, left = 0, right = timestamps.length - 1;
while (left <= right) {
pivot = left + (right - left) / 2;
if (timestamps[pivot] == target)
return pivot;
if (target < timestamps[pivot])
right = pivot - 1;
else
left = pivot + 1;
}
return left;
}

protected void checkSemaphoreRelease() throws InterruptedException {
assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS));
assertTrue(released.get());
}
}
