Skip to content

Commit

Permalink
AD model performance benchmark
Browse files Browse the repository at this point in the history
This PR adds an HCAD model performance benchmark so that we can compare model performance across versions.

Regarding benchmark data, we randomly generated synthetic data with known anomalies inserted throughout the signal. In particular, these are one/two/four dimensional data where each dimension is a noisy cosine wave. Anomalies are inserted into one dimension with 0.003 probability. Anomalies across each dimension can be independent or dependent. We have approximately 5000 observations per data set. The data set is generated using the same random seed so the result is comparable across versions.

We also backported #600 so that we can capture the performance data in CI output.

Testing done:
* added unit tests to run the benchmark.

Signed-off-by: Kaituo Li <kaituo@amazon.com>
  • Loading branch information
kaituo committed Nov 11, 2022
1 parent df4c94e commit 739667f
Show file tree
Hide file tree
Showing 7 changed files with 696 additions and 223 deletions.
43 changes: 43 additions & 0 deletions .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Runs the HCAD model performance benchmark on every push and pull request so
# that model quality (precision/recall) can be compared across versions.
name: Run AD benchmark
on:
  push:
    branches:
      - "*"
  pull_request:
    branches:
      - "*"

jobs:
  Build-ad:
    strategy:
      matrix:
        # Benchmark across all supported JDKs.
        java: [8, 11, 14]
      # Keep the other matrix entries running even if one JDK fails,
      # so we still get benchmark numbers from the remaining JDKs.
      fail-fast: false

    name: Run Anomaly detection model performance benchmark
    runs-on: ubuntu-latest

    steps:
      - name: Setup Java ${{ matrix.java }}
        uses: actions/setup-java@v1
        with:
          java-version: ${{ matrix.java }}

      # anomaly-detection
      - name: Checkout AD
        uses: actions/checkout@v2

      # Build the plugin zip and stage it where the BWC test harness expects it.
      - name: Assemble anomaly-detection
        run: |
          ./gradlew assemble -Dopensearch.version=1.3.4-SNAPSHOT
          echo "Creating ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/1.3.4.0-SNAPSHOT ..."
          mkdir -p ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/1.3.4.0-SNAPSHOT
          echo "Copying ./build/distributions/*.zip to ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/1.3.4.0-SNAPSHOT ..."
          ls ./build/distributions/
          cp ./build/distributions/*.zip ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/1.3.4.0-SNAPSHOT
          echo "Copied ./build/distributions/*.zip to ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/1.3.4.0-SNAPSHOT ..."
          ls ./src/test/resources/org/opensearch/ad/bwc/anomaly-detection/1.3.4.0-SNAPSHOT

      # Fixed -Dtests.seed values keep benchmark results comparable across
      # versions; -Dtest.logs=true surfaces precision/recall in CI output.
      - name: Build and Run Tests
        run: |
          ./gradlew ':test' --tests "org.opensearch.ad.ml.HCADModelPerfTests" -Dtests.seed=2AEBDBBAE75AC5E0 -Dtests.security.manager=false -Dtests.locale=es-CU -Dtests.timezone=Chile/EasterIsland -Dtest.logs=true -Dhcad-benchmark=true
          ./gradlew integTest --tests "org.opensearch.ad.e2e.DetectionResultEvalutationIT.testDataset" -Dtests.seed=60CDDB34427ACD0C -Dtests.security.manager=false -Dtests.locale=kab-DZ -Dtests.timezone=Asia/Hebron -Dtest.logs=true
27 changes: 27 additions & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,10 @@ buildscript {
'opensearch-anomaly-detection-1.1.0.0.zip'
bwcOpenSearchJSDownload = 'https://ci.opensearch.org/ci/dbc/bundle-build/1.1.0/20210930/linux/x64/builds/opensearch/plugins/' +
'opensearch-job-scheduler-1.1.0.0.zip'
// gradle build won't print logs during test by default unless there is a failure.
// It is useful to record intermediate information like prediction precision and recall.
// This option turns on log printing during tests.
printLogs = "true" == System.getProperty("test.logs", "false")
}

repositories {
Expand Down Expand Up @@ -175,6 +179,12 @@ test {
}
include '**/*Tests.class'
systemProperty 'tests.security.manager', 'false'

if (System.getProperty("hcad-benchmark") == null) {
filter {
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
}
}
}

task integTest(type: RestIntegTestTask) {
Expand Down Expand Up @@ -240,6 +250,12 @@ integTest {
jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=*:5005'
}

if (printLogs) {
testLogging {
showStandardStreams = true
outputs.upToDateWhen {false}
}
}
}

testClusters.integTest {
Expand Down Expand Up @@ -670,6 +686,7 @@ dependencies {
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.7.2'
testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.7.2'
testCompileOnly 'junit:junit:4.13.2'
implementation group: 'org.javassist', name: 'javassist', version:'3.28.0-GA'
}

compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
Expand Down Expand Up @@ -775,3 +792,13 @@ task updateVersion {
ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true)
}
}

// show test results so that we can record information like precision/recall results of correctness testing.
if (printLogs) {
test {
testLogging {
showStandardStreams = true
outputs.upToDateWhen {false}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ private void verifyTestResults(
assertTrue(recall >= minRecall);

assertTrue(errors <= maxError);
LOG.info("Precision: {}, Window recall: {}", precision, recall);
}

private int isAnomaly(Instant time, List<Entry<Instant, Instant>> labels) {
Expand Down
247 changes: 247 additions & 0 deletions src/test/java/org/opensearch/ad/ml/AbstractModelPerfTest.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,247 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/

package org.opensearch.ad.ml;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE;

import java.time.Clock;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.opensearch.Version;
import org.opensearch.action.ActionListener;
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.get.GetResponse;
import org.opensearch.ad.AbstractADTest;
import org.opensearch.ad.AnomalyDetectorPlugin;
import org.opensearch.ad.MemoryTracker;
import org.opensearch.ad.NodeStateManager;
import org.opensearch.ad.TestHelpers;
import org.opensearch.ad.dataprocessor.IntegerSensitiveSingleFeatureLinearUniformInterpolator;
import org.opensearch.ad.dataprocessor.Interpolator;
import org.opensearch.ad.dataprocessor.LinearUniformInterpolator;
import org.opensearch.ad.dataprocessor.SingleFeatureLinearUniformInterpolator;
import org.opensearch.ad.feature.FeatureManager;
import org.opensearch.ad.feature.SearchFeatureDao;
import org.opensearch.ad.model.AnomalyDetector;
import org.opensearch.ad.model.Entity;
import org.opensearch.ad.model.IntervalTimeConfiguration;
import org.opensearch.ad.ratelimit.CheckpointWriteWorker;
import org.opensearch.ad.settings.AnomalyDetectorSettings;
import org.opensearch.ad.util.ClientUtil;
import org.opensearch.client.Client;
import org.opensearch.cluster.node.DiscoveryNode;
import org.opensearch.cluster.node.DiscoveryNodeRole;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.test.ClusterServiceUtils;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.threadpool.ThreadPool;

import com.google.common.collect.ImmutableList;

/**
 * Base fixture for HCAD (high-cardinality anomaly detection) model performance
 * benchmark tests. Wires an {@link EntityColdStarter} and {@link ModelManager}
 * to mocked cluster/client dependencies so subclasses can train and evaluate
 * entity models deterministically (fixed RCF seed, mocked clock).
 */
public class AbstractModelPerfTest extends AbstractADTest {
    // Minimum number of samples required before a model can be initialized
    // (taken from AnomalyDetectorSettings.NUM_MIN_SAMPLES in setUp).
    protected int numMinSamples;
    // Identifiers of the single test entity/detector used by subclasses.
    protected String modelId;
    protected String entityName;
    protected String detectorId;
    protected ModelState<EntityModel> modelState;
    // Mocked clock pinned to fixed values so results are reproducible.
    protected Clock clock;
    protected float priority;
    // Component under benchmark: cold starts entity models from samples.
    protected EntityColdStarter entityColdStarter;
    protected NodeStateManager stateManager;
    // Mocked feature source; subclasses stub it to feed benchmark data.
    protected SearchFeatureDao searchFeatureDao;
    protected Interpolator interpolator;
    protected CheckpointDao checkpoint;
    protected FeatureManager featureManager;
    protected Settings settings;
    protected ThreadPool threadPool;
    // Flag/latch pair used to wait for async listener completion in tests;
    // see checkSemaphoreRelease().
    protected AtomicBoolean released;
    protected Runnable releaseSemaphore;
    protected ActionListener<Void> listener;
    protected CountDownLatch inProgressLatch;
    protected CheckpointWriteWorker checkpointWriteQueue;
    protected Entity entity;
    protected AnomalyDetector detector;
    // Fixed seed so RCF model output is comparable across runs/versions.
    protected long rcfSeed;
    protected ClientUtil clientUtil;
    protected ModelManager modelManager;

    /**
     * Builds the fixture: a mocked clock/thread pool/client, a detector with a
     * 1-minute interval and one category field, a real NodeStateManager and
     * FeatureManager, and an EntityColdStarter/ModelManager seeded with a
     * fixed RCF seed (2051).
     *
     * @throws Exception if the superclass setup fails
     */
    @SuppressWarnings("unchecked")
    @Override
    public void setUp() throws Exception {
        super.setUp();
        numMinSamples = AnomalyDetectorSettings.NUM_MIN_SAMPLES;

        clock = mock(Clock.class);
        when(clock.instant()).thenReturn(Instant.now());

        threadPool = mock(ThreadPool.class);
        setUpADThreadPool(threadPool);

        settings = Settings.EMPTY;

        Client client = mock(Client.class);
        clientUtil = mock(ClientUtil.class);

        // Single-category detector with a 1-minute detection interval.
        detector = TestHelpers.AnomalyDetectorBuilder
            .newInstance()
            .setDetectionInterval(new IntervalTimeConfiguration(1, ChronoUnit.MINUTES))
            .setCategoryFields(ImmutableList.of(randomAlphaOfLength(5)))
            .build();
        // Pin the wall clock to a fixed epoch millisecond for reproducibility.
        when(clock.millis()).thenReturn(1602401500000L);
        // Stub clientUtil so any GetRequest for the detector config resolves to
        // the test detector above instead of hitting a real index.
        doAnswer(invocation -> {
            GetRequest request = invocation.getArgument(0);
            // Note: this local shadows the 'listener' field on purpose.
            ActionListener<GetResponse> listener = invocation.getArgument(2);

            listener.onResponse(TestHelpers.createGetResponse(detector, detectorId, AnomalyDetector.ANOMALY_DETECTORS_INDEX));

            return null;
        }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class));

        // Register the AD settings NodeStateManager reads on top of the
        // built-in cluster settings so the real ClusterService accepts them.
        Set<Setting<?>> nodestateSetting = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        nodestateSetting.add(MAX_RETRY_FOR_UNRESPONSIVE_NODE);
        nodestateSetting.add(BACKOFF_MINUTES);
        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, nodestateSetting);

        DiscoveryNode discoveryNode = new DiscoveryNode(
            "node1",
            OpenSearchTestCase.buildNewFakeTransportAddress(),
            Collections.emptyMap(),
            DiscoveryNodeRole.BUILT_IN_ROLES,
            Version.CURRENT
        );

        ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool, discoveryNode, clusterSettings);

        stateManager = new NodeStateManager(
            client,
            xContentRegistry(),
            settings,
            clientUtil,
            clock,
            AnomalyDetectorSettings.HOURLY_MAINTENANCE,
            clusterService
        );

        SingleFeatureLinearUniformInterpolator singleFeatureLinearUniformInterpolator =
            new IntegerSensitiveSingleFeatureLinearUniformInterpolator();
        interpolator = new LinearUniformInterpolator(singleFeatureLinearUniformInterpolator);

        searchFeatureDao = mock(SearchFeatureDao.class);
        checkpoint = mock(CheckpointDao.class);

        // Real FeatureManager wired to the mocked feature DAO and clock.
        featureManager = new FeatureManager(
            searchFeatureDao,
            interpolator,
            clock,
            AnomalyDetectorSettings.MAX_TRAIN_SAMPLE,
            AnomalyDetectorSettings.MAX_SAMPLE_STRIDE,
            AnomalyDetectorSettings.TRAIN_SAMPLE_TIME_RANGE_IN_HOURS,
            AnomalyDetectorSettings.MIN_TRAIN_SAMPLES,
            AnomalyDetectorSettings.MAX_SHINGLE_PROPORTION_MISSING,
            AnomalyDetectorSettings.MAX_IMPUTATION_NEIGHBOR_DISTANCE,
            AnomalyDetectorSettings.PREVIEW_SAMPLE_RATE,
            AnomalyDetectorSettings.MAX_PREVIEW_SAMPLES,
            AnomalyDetectorSettings.HOURLY_MAINTENANCE,
            threadPool,
            AnomalyDetectorPlugin.AD_THREAD_POOL_NAME
        );

        checkpointWriteQueue = mock(CheckpointWriteWorker.class);

        // Fixed RCF seed makes cold-start model output deterministic.
        rcfSeed = 2051L;
        entityColdStarter = new EntityColdStarter(
            clock,
            threadPool,
            stateManager,
            AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
            AnomalyDetectorSettings.NUM_TREES,
            AnomalyDetectorSettings.TIME_DECAY,
            numMinSamples,
            AnomalyDetectorSettings.MAX_SAMPLE_STRIDE,
            AnomalyDetectorSettings.MAX_TRAIN_SAMPLE,
            interpolator,
            searchFeatureDao,
            AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE,
            featureManager,
            settings,
            AnomalyDetectorSettings.HOURLY_MAINTENANCE,
            checkpointWriteQueue,
            rcfSeed,
            AnomalyDetectorSettings.MAX_COLD_START_ROUNDS
        );

        detectorId = "123";
        modelId = "123_entity_abc";
        entityName = "abc";
        priority = 0.3f;
        entity = Entity.createSingleAttributeEntity("field", entityName);

        released = new AtomicBoolean();

        // Listener that flips 'released' and counts down the latch when the
        // async operation under test completes; paired with checkSemaphoreRelease().
        inProgressLatch = new CountDownLatch(1);
        releaseSemaphore = () -> {
            released.set(true);
            inProgressLatch.countDown();
        };
        listener = ActionListener.wrap(releaseSemaphore);

        modelManager = new ModelManager(
            mock(CheckpointDao.class),
            mock(Clock.class),
            AnomalyDetectorSettings.NUM_TREES,
            AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE,
            AnomalyDetectorSettings.TIME_DECAY,
            AnomalyDetectorSettings.NUM_MIN_SAMPLES,
            AnomalyDetectorSettings.THRESHOLD_MIN_PVALUE,
            AnomalyDetectorSettings.MIN_PREVIEW_SIZE,
            AnomalyDetectorSettings.HOURLY_MAINTENANCE,
            AnomalyDetectorSettings.HOURLY_MAINTENANCE,
            entityColdStarter,
            mock(FeatureManager.class),
            mock(MemoryTracker.class)
        );
    }

    /**
     * Waits (up to 100 seconds) for the async listener built in setUp() to
     * fire, then asserts that it actually ran.
     *
     * @throws InterruptedException if the waiting thread is interrupted
     */
    protected void checkSemaphoreRelease() throws InterruptedException {
        assertTrue(inProgressLatch.await(100, TimeUnit.SECONDS));
        assertTrue(released.get());
    }

    /**
     * Binary search over {@code timestamps}: returns the index of
     * {@code target} if present, otherwise the index at which it would be
     * inserted to keep the array sorted. Assumes {@code timestamps} is sorted
     * in ascending order.
     *
     * @param timestamps sorted (ascending) array of epoch timestamps
     * @param target     timestamp to locate
     * @return index of target, or its insertion point if absent
     */
    public int searchInsert(long[] timestamps, long target) {
        int pivot, left = 0, right = timestamps.length - 1;
        while (left <= right) {
            pivot = left + (right - left) / 2;
            if (timestamps[pivot] == target)
                return pivot;
            if (target < timestamps[pivot])
                right = pivot - 1;
            else
                left = pivot + 1;
        }
        return left;
    }
}
Loading

0 comments on commit 739667f

Please sign in to comment.