diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java b/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java index 316094766..3f0f08df3 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java @@ -246,7 +246,7 @@ protected void runAdJob( String user = userInfo.getName(); List roles = userInfo.getRoles(); - String resultIndex = jobParameter.getResultIndex(); + String resultIndex = jobParameter.getCustomResultIndex(); if (resultIndex == null) { runAnomalyDetectionJob( jobParameter, @@ -536,7 +536,7 @@ private void stopAdJob(String detectorId, AnomalyDetectorFunction function) { Instant.now(), job.getLockDurationSeconds(), job.getUser(), - job.getResultIndex() + job.getCustomResultIndex() ); IndexRequest indexRequest = new IndexRequest(CommonName.JOB_INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorPlugin.java b/src/main/java/org/opensearch/ad/AnomalyDetectorPlugin.java index a5b924046..7bbf4347b 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorPlugin.java @@ -175,6 +175,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; +import org.opensearch.forecast.model.Forecaster; import org.opensearch.jobscheduler.spi.JobSchedulerExtension; import org.opensearch.jobscheduler.spi.ScheduledJobParser; import org.opensearch.jobscheduler.spi.ScheduledJobRunner; @@ -955,7 +956,8 @@ public List getNamedXContent() { AnomalyDetector.XCONTENT_REGISTRY, AnomalyResult.XCONTENT_REGISTRY, DetectorInternalState.XCONTENT_REGISTRY, - AnomalyDetectorJob.XCONTENT_REGISTRY + AnomalyDetectorJob.XCONTENT_REGISTRY, + Forecaster.XCONTENT_REGISTRY ); } diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java 
b/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java index 28e878308..b5e56ba51 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java @@ -149,7 +149,7 @@ private void prepareProfile( ActionListener listener, Set profilesToCollect ) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); GetRequest getRequest = new GetRequest(CommonName.JOB_INDEX, detectorId); client.get(getRequest, ActionListener.wrap(getResponse -> { if (getResponse != null && getResponse.isExists()) { @@ -162,7 +162,7 @@ private void prepareProfile( AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); long enabledTimeMs = job.getEnabledTime().toEpochMilli(); - boolean isMultiEntityDetector = detector.isMultientityDetector(); + boolean isMultiEntityDetector = detector.isHighCardinality(); int totalResponsesToWait = 0; if (profilesToCollect.contains(DetectorProfileName.ERROR)) { @@ -284,8 +284,8 @@ private void prepareProfile( } private void profileEntityStats(MultiResponsesDelegateActionListener listener, AnomalyDetector detector) { - List categoryField = detector.getCategoryField(); - if (!detector.isMultientityDetector() || categoryField.size() > ADNumericSetting.maxCategoricalFields()) { + List categoryField = detector.getCategoryFields(); + if (!detector.isHighCardinality() || categoryField.size() > ADNumericSetting.maxCategoricalFields()) { listener.onResponse(new DetectorProfile.Builder().build()); } else { if (categoryField.size() == 1) { @@ -304,7 +304,7 @@ private void profileEntityStats(MultiResponsesDelegateActionListener { - logger.warn(ADCommonMessages.FAIL_TO_GET_TOTAL_ENTITIES + detector.getDetectorId()); + logger.warn(ADCommonMessages.FAIL_TO_GET_TOTAL_ENTITIES + detector.getId()); listener.onFailure(searchException); }); // using the original context in listener as user roles have no permissions for internal operations like 
fetching a @@ -313,7 +313,7 @@ private void profileEntityStats(MultiResponsesDelegateActionListenerasyncRequestWithInjectedSecurity( request, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -322,7 +322,11 @@ private void profileEntityStats(MultiResponsesDelegateActionListener new TermsValuesSourceBuilder(f).field(f)).collect(Collectors.toList()) + detector + .getCategoryFields() + .stream() + .map(f -> new TermsValuesSourceBuilder(f).field(f)) + .collect(Collectors.toList()) ) .size(maxTotalEntitiesToTrack); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().aggregation(bucketAggs).trackTotalHits(false).size(0); @@ -353,7 +357,7 @@ private void profileEntityStats(MultiResponsesDelegateActionListener { - logger.warn(ADCommonMessages.FAIL_TO_GET_TOTAL_ENTITIES + detector.getDetectorId()); + logger.warn(ADCommonMessages.FAIL_TO_GET_TOTAL_ENTITIES + detector.getId()); listener.onFailure(searchException); }); // using the original context in listener as user roles have no permissions for internal operations like fetching a @@ -362,7 +366,7 @@ private void profileEntityStats(MultiResponsesDelegateActionListenerasyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -400,7 +404,7 @@ private void profileStateRelated( Set profilesToCollect ) { if (enabled) { - RCFPollingRequest request = new RCFPollingRequest(detector.getDetectorId()); + RCFPollingRequest request = new RCFPollingRequest(detector.getId()); client.execute(RCFPollingAction.INSTANCE, request, onPollRCFUpdates(detector, profilesToCollect, listener)); } else { DetectorProfile.Builder builder = new DetectorProfile.Builder(); @@ -419,7 +423,7 @@ private void profileModels( MultiResponsesDelegateActionListener listener ) { DiscoveryNode[] dataNodes = nodeFilter.getEligibleDataNodes(); - ProfileRequest profileRequest = new 
ProfileRequest(detector.getDetectorId(), profiles, forMultiEntityDetector, dataNodes); + ProfileRequest profileRequest = new ProfileRequest(detector.getId(), profiles, forMultiEntityDetector, dataNodes); client.execute(ProfileAction.INSTANCE, profileRequest, onModelResponse(detector, profiles, job, listener));// get init progress } @@ -429,7 +433,7 @@ private ActionListener onModelResponse( AnomalyDetectorJob job, MultiResponsesDelegateActionListener listener ) { - boolean isMultientityDetector = detector.isMultientityDetector(); + boolean isMultientityDetector = detector.isHighCardinality(); return ActionListener.wrap(profileResponse -> { DetectorProfile.Builder profile = new DetectorProfile.Builder(); if (profilesToCollect.contains(DetectorProfileName.COORDINATING_NODE)) { @@ -516,7 +520,7 @@ private ActionListener onInittedEver( logger .error( "Fail to find any anomaly result with anomaly score larger than 0 after AD job enabled time for detector {}", - detector.getDetectorId() + detector.getId() ); listener.onFailure(exception); } @@ -565,11 +569,7 @@ private ActionListener onPollRCFUpdates( // data exists. 
processInitResponse(detector, profilesToCollect, 0L, true, new DetectorProfile.Builder(), listener); } else { - logger - .error( - new ParameterizedMessage("Fail to get init progress through messaging for {}", detector.getDetectorId()), - exception - ); + logger.error(new ParameterizedMessage("Fail to get init progress through messaging for {}", detector.getId()), exception); listener.onFailure(exception); } }); @@ -603,7 +603,7 @@ private void processInitResponse( InitProgressProfile initProgress = computeInitProgressProfile(totalUpdates, 0); builder.initProgress(initProgress); } else { - long intervalMins = ((IntervalTimeConfiguration) detector.getDetectionInterval()).toDuration().toMinutes(); + long intervalMins = ((IntervalTimeConfiguration) detector.getInterval()).toDuration().toMinutes(); InitProgressProfile initProgress = computeInitProgressProfile(totalUpdates, intervalMins); builder.initProgress(initProgress); } diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java b/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java index cf01f9008..2dedbfc43 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java @@ -72,7 +72,7 @@ public void executeDetector( ActionListener> listener ) throws IOException { context.restore(); - List categoryField = detector.getCategoryField(); + List categoryField = detector.getCategoryFields(); if (categoryField != null && !categoryField.isEmpty()) { featureManager.getPreviewEntities(detector, startTime.toEpochMilli(), endTime.toEpochMilli(), ActionListener.wrap(entities -> { @@ -86,13 +86,13 @@ public void executeDetector( ActionListener entityAnomalyResultListener = ActionListener .wrap( entityAnomalyResult -> { listener.onResponse(entityAnomalyResult.getAnomalyResults()); }, - e -> onFailure(e, listener, detector.getDetectorId()) + e -> onFailure(e, listener, detector.getId()) ); MultiResponsesDelegateActionListener 
multiEntitiesResponseListener = new MultiResponsesDelegateActionListener( entityAnomalyResultListener, entities.size(), - String.format(Locale.ROOT, "Fail to get preview result for multi entity detector %s", detector.getDetectorId()), + String.format(Locale.ROOT, "Fail to get preview result for multi entity detector %s", detector.getId()), true ); for (Entity entity : entities) { @@ -113,7 +113,7 @@ public void executeDetector( }, e -> multiEntitiesResponseListener.onFailure(e)) ); } - }, e -> onFailure(e, listener, detector.getDetectorId()))); + }, e -> onFailure(e, listener, detector.getId()))); } else { featureManager.getPreviewFeatures(detector, startTime.toEpochMilli(), endTime.toEpochMilli(), ActionListener.wrap(features -> { try { @@ -121,9 +121,9 @@ public void executeDetector( .getPreviewResults(features.getProcessedFeatures(), detector.getShingleSize()); listener.onResponse(sample(parsePreviewResult(detector, features, results, null), maxPreviewResults)); } catch (Exception e) { - onFailure(e, listener, detector.getDetectorId()); + onFailure(e, listener, detector.getId()); } - }, e -> onFailure(e, listener, detector.getDetectorId()))); + }, e -> onFailure(e, listener, detector.getId()))); } } @@ -184,7 +184,7 @@ private List parsePreviewResult( ); } else { result = new AnomalyResult( - detector.getDetectorId(), + detector.getId(), null, featureDatas, Instant.ofEpochMilli(timeRange.getKey()), diff --git a/src/main/java/org/opensearch/ad/EntityProfileRunner.java b/src/main/java/org/opensearch/ad/EntityProfileRunner.java index cf47d002a..042098095 100644 --- a/src/main/java/org/opensearch/ad/EntityProfileRunner.java +++ b/src/main/java/org/opensearch/ad/EntityProfileRunner.java @@ -105,7 +105,7 @@ public void profile( ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); AnomalyDetector detector = AnomalyDetector.parse(parser, detectorId); - List categoryFields = detector.getCategoryField(); + List categoryFields = 
detector.getCategoryFields(); int maxCategoryFields = ADNumericSetting.maxCategoricalFields(); if (categoryFields == null || categoryFields.size() == 0) { listener.onFailure(new IllegalArgumentException(NOT_HC_DETECTOR_ERR_MSG)); @@ -186,7 +186,7 @@ private void validateEntity( .asyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -277,7 +277,7 @@ private void getJob( detectorId, enabledTimeMs, entityValue, - detector.getResultIndex() + detector.getCustomResultIndex() ); EntityProfile.Builder builder = new EntityProfile.Builder(); @@ -397,7 +397,7 @@ private void sendInitState( builder.state(EntityState.INIT); } if (profilesToCollect.contains(EntityProfileName.INIT_PROGRESS)) { - long intervalMins = ((IntervalTimeConfiguration) detector.getDetectionInterval()).toDuration().toMinutes(); + long intervalMins = ((IntervalTimeConfiguration) detector.getInterval()).toDuration().toMinutes(); InitProgressProfile initProgress = computeInitProgressProfile(updates, intervalMins); builder.initProgress(initProgress); } diff --git a/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java b/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java index 5c4e18e10..aad944277 100644 --- a/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java +++ b/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java @@ -92,7 +92,7 @@ public void indexAnomalyResult( AnomalyResultResponse response, AnomalyDetector detector ) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); try { // skipping writing to the result index if not necessary // For a single-entity detector, the result is not useful if error is null @@ -124,7 +124,7 @@ public void indexAnomalyResult( response.getError() ); - String resultIndex = detector.getResultIndex(); + String resultIndex = detector.getCustomResultIndex(); 
anomalyResultHandler.index(anomalyResult, detectorId, resultIndex); updateRealtimeTask(response, detectorId); } catch (EndRunException e) { @@ -156,13 +156,7 @@ private void updateRealtimeTask(AnomalyResultResponse response, String detectorI Runnable profileHCInitProgress = () -> { client.execute(ProfileAction.INSTANCE, profileRequest, ActionListener.wrap(r -> { log.debug("Update latest realtime task for HC detector {}, total updates: {}", detectorId, r.getTotalUpdates()); - updateLatestRealtimeTask( - detectorId, - null, - r.getTotalUpdates(), - response.getDetectorIntervalInMinutes(), - response.getError() - ); + updateLatestRealtimeTask(detectorId, null, r.getTotalUpdates(), response.getIntervalInMinutes(), response.getError()); }, e -> { log.error("Failed to update latest realtime task for " + detectorId, e); })); }; if (!adTaskManager.isHCRealtimeTaskStartInitializing(detectorId)) { @@ -181,13 +175,7 @@ private void updateRealtimeTask(AnomalyResultResponse response, String detectorI detectorId, response.getRcfTotalUpdates() ); - updateLatestRealtimeTask( - detectorId, - null, - response.getRcfTotalUpdates(), - response.getDetectorIntervalInMinutes(), - response.getError() - ); + updateLatestRealtimeTask(detectorId, null, response.getRcfTotalUpdates(), response.getIntervalInMinutes(), response.getError()); } } @@ -278,7 +266,7 @@ public void indexAnomalyResultException( String taskState, AnomalyDetector detector ) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); try { IntervalTimeConfiguration windowDelay = (IntervalTimeConfiguration) detector.getWindowDelay(); Instant dataStartTime = detectionStartTime.minus(windowDelay.getInterval(), windowDelay.getUnit()); @@ -299,7 +287,7 @@ public void indexAnomalyResultException( anomalyDetectionIndices.getSchemaVersion(ADIndex.RESULT), null // no model id ); - String resultIndex = detector.getResultIndex(); + String resultIndex = detector.getCustomResultIndex(); if (resultIndex != 
null && !anomalyDetectionIndices.doesIndexExist(resultIndex)) { // Set result index as null, will write exception to default result index. anomalyResultHandler.index(anomalyResult, detectorId, null); @@ -307,7 +295,7 @@ public void indexAnomalyResultException( anomalyResultHandler.index(anomalyResult, detectorId, resultIndex); } - if (errorMessage.contains(ADCommonMessages.NO_MODEL_ERR_MSG) && !detector.isMultiCategoryDetector()) { + if (errorMessage.contains(ADCommonMessages.NO_MODEL_ERR_MSG) && !detector.isHighCardinality()) { // single stream detector raises ResourceNotFoundException containing CommonErrorMessages.NO_CHECKPOINT_ERR_MSG // when there is no checkpoint. // Delay real time cache update by one minute so we will have trained models by then and update the state @@ -321,7 +309,7 @@ public void indexAnomalyResultException( detectorId, taskState, totalUpdates, - detector.getDetectorIntervalInMinutes(), + detector.getIntervalInMinutes(), totalUpdates > 0 ? "" : errorMessage ); }, e -> { diff --git a/src/main/java/org/opensearch/ad/NodeState.java b/src/main/java/org/opensearch/ad/NodeState.java index c12a91deb..9c4693cbd 100644 --- a/src/main/java/org/opensearch/ad/NodeState.java +++ b/src/main/java/org/opensearch/ad/NodeState.java @@ -58,7 +58,7 @@ public NodeState(String detectorId, Clock clock) { this.detectorJob = null; } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/NodeStateManager.java b/src/main/java/org/opensearch/ad/NodeStateManager.java index e99cfdbe8..0f5d32e35 100644 --- a/src/main/java/org/opensearch/ad/NodeStateManager.java +++ b/src/main/java/org/opensearch/ad/NodeStateManager.java @@ -39,11 +39,11 @@ import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.lease.Releasable; import org.opensearch.common.settings.Settings; import 
org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.common.lease.Releasable; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.timeseries.common.exception.EndRunException; diff --git a/src/main/java/org/opensearch/ad/ProfileUtil.java b/src/main/java/org/opensearch/ad/ProfileUtil.java index 835f99219..4d7563890 100644 --- a/src/main/java/org/opensearch/ad/ProfileUtil.java +++ b/src/main/java/org/opensearch/ad/ProfileUtil.java @@ -59,11 +59,7 @@ public static void confirmDetectorRealtimeInitStatus( Client client, ActionListener listener ) { - SearchRequest searchLatestResult = createRealtimeInittedEverRequest( - detector.getDetectorId(), - enabledTime, - detector.getResultIndex() - ); + SearchRequest searchLatestResult = createRealtimeInittedEverRequest(detector.getId(), enabledTime, detector.getCustomResultIndex()); client.search(searchLatestResult, listener); } } diff --git a/src/main/java/org/opensearch/ad/caching/CacheBuffer.java b/src/main/java/org/opensearch/ad/caching/CacheBuffer.java index bed7052c9..d9ec0143d 100644 --- a/src/main/java/org/opensearch/ad/caching/CacheBuffer.java +++ b/src/main/java/org/opensearch/ad/caching/CacheBuffer.java @@ -517,7 +517,7 @@ public boolean expired(Duration stateTtl) { return expired(lastUsedTime, stateTtl, clock.instant()); } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/caching/PriorityCache.java b/src/main/java/org/opensearch/ad/caching/PriorityCache.java index d7c25fe0b..3d7dcd383 100644 --- a/src/main/java/org/opensearch/ad/caching/PriorityCache.java +++ b/src/main/java/org/opensearch/ad/caching/PriorityCache.java @@ -156,7 +156,7 @@ public PriorityCache( @Override public ModelState get(String modelId, AnomalyDetector detector) { - 
String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); CacheBuffer buffer = computeBufferIfAbsent(detector, detectorId); ModelState modelState = buffer.get(modelId); @@ -171,7 +171,7 @@ public ModelState get(String modelId, AnomalyDetector detector) { return new DoorKeeper( TimeSeriesSettings.DOOR_KEEPER_FOR_CACHE_MAX_INSERTION, TimeSeriesSettings.DOOR_KEEPER_FALSE_POSITIVE_RATE, - detector.getDetectionIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ), + detector.getIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ), clock ); } @@ -251,7 +251,7 @@ public boolean hostIfPossible(AnomalyDetector detector, ModelState return false; } String modelId = toUpdate.getModelId(); - String detectorId = toUpdate.getDetectorId(); + String detectorId = toUpdate.getId(); if (Strings.isEmpty(modelId) || Strings.isEmpty(detectorId)) { return false; @@ -462,7 +462,7 @@ private CacheBuffer computeBufferIfAbsent(AnomalyDetector detector, String detec long requiredBytes = getRequiredMemory(detector, dedicatedCacheSize); if (memoryTracker.canAllocateReserved(requiredBytes)) { memoryTracker.consumeMemory(requiredBytes, true, Origin.HC_DETECTOR); - long intervalSecs = detector.getDetectorIntervalInSeconds(); + long intervalSecs = detector.getIntervalInSeconds(); buffer = new CacheBuffer( dedicatedCacheSize, diff --git a/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java b/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java index e4832a663..f74d237df 100644 --- a/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java +++ b/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java @@ -244,13 +244,13 @@ private void createRealtimeADTask( try (XContentParser parser = createXContentParserFromRegistry(xContentRegistry, r.getSourceAsBytesRef())) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); AnomalyDetector detector = 
AnomalyDetector.parse(parser, r.getId()); - ADTaskType taskType = detector.isMultientityDetector() + ADTaskType taskType = detector.isHighCardinality() ? ADTaskType.REALTIME_HC_DETECTOR : ADTaskType.REALTIME_SINGLE_ENTITY; Instant now = Instant.now(); String userName = job.getUser() != null ? job.getUser().getName() : null; ADTask adTask = new ADTask.Builder() - .detectorId(detector.getDetectorId()) + .detectorId(detector.getId()) .detector(detector) .error(error) .isLatest(true) diff --git a/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java b/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java index 037cb8fc0..f51b0e857 100644 --- a/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java +++ b/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java @@ -157,7 +157,7 @@ public PageIterator iterator() throws IOException { CompositeAggregationBuilder composite = AggregationBuilders .composite( AGG_NAME_COMP, - anomalyDetector.getCategoryField().stream().map(f -> new TermsValuesSourceBuilder(f).field(f)).collect(Collectors.toList()) + anomalyDetector.getCategoryFields().stream().map(f -> new TermsValuesSourceBuilder(f).field(f)).collect(Collectors.toList()) ) .size(pageSize); for (Feature feature : anomalyDetector.getFeatureAttributes()) { @@ -218,7 +218,7 @@ public void onFailure(Exception e) { .asyncRequestWithInjectedSecurity( searchRequest, client::search, - anomalyDetector.getDetectorId(), + anomalyDetector.getId(), client, searchResponseListener ); diff --git a/src/main/java/org/opensearch/ad/feature/FeatureManager.java b/src/main/java/org/opensearch/ad/feature/FeatureManager.java index a188c0611..1f43bdda7 100644 --- a/src/main/java/org/opensearch/ad/feature/FeatureManager.java +++ b/src/main/java/org/opensearch/ad/feature/FeatureManager.java @@ -145,10 +145,10 @@ public void getCurrentFeatures(AnomalyDetector detector, long startTime, long en int shingleSize = detector.getShingleSize(); Deque>> shingle = 
detectorIdsToTimeShingles - .computeIfAbsent(detector.getDetectorId(), id -> new ArrayDeque<>(shingleSize)); + .computeIfAbsent(detector.getId(), id -> new ArrayDeque<>(shingleSize)); // To allow for small time variations/delays in running the detector. - long maxTimeDifference = detector.getDetectorIntervalInMilliseconds() / 2; + long maxTimeDifference = detector.getIntervalInMilliseconds() / 2; Map>> featuresMap = getNearbyPointsForShingle(detector, shingle, endTime, maxTimeDifference) .collect(Collectors.toMap(Entry::getKey, Entry::getValue)); @@ -165,7 +165,7 @@ public void getCurrentFeatures(AnomalyDetector detector, long startTime, long en updateUnprocessedFeatures(detector, shingle, featuresMap, endTime, listener); }, listener::onFailure)); } catch (IOException e) { - listener.onFailure(new EndRunException(detector.getDetectorId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, true)); + listener.onFailure(new EndRunException(detector.getId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, true)); } } else { listener.onResponse(getProcessedFeatures(shingle, detector, endTime)); @@ -177,7 +177,7 @@ private List> getMissingRangesInShingle( Map>> featuresMap, long endTime ) { - long intervalMilli = detector.getDetectorIntervalInMilliseconds(); + long intervalMilli = detector.getIntervalInMilliseconds(); int shingleSize = detector.getShingleSize(); return getFullShingleEndTimes(endTime, intervalMilli, shingleSize) .filter(time -> !featuresMap.containsKey(time)) @@ -212,7 +212,7 @@ private void updateUnprocessedFeatures( ActionListener listener ) { shingle.clear(); - getFullShingleEndTimes(endTime, detector.getDetectorIntervalInMilliseconds(), detector.getShingleSize()) + getFullShingleEndTimes(endTime, detector.getIntervalInMilliseconds(), detector.getShingleSize()) .mapToObj(time -> featuresMap.getOrDefault(time, new SimpleImmutableEntry<>(time, Optional.empty()))) .forEach(e -> shingle.add(e)); @@ -228,7 +228,7 @@ private double[][] filterAndFill(Deque>> shingle, 
double[][] result = null; if (filteredShingle.size() >= shingleSize - getMaxMissingPoints(shingleSize)) { // Imputes missing data points with the values of neighboring data points. - long maxMillisecondsDifference = maxNeighborDistance * detector.getDetectorIntervalInMilliseconds(); + long maxMillisecondsDifference = maxNeighborDistance * detector.getIntervalInMilliseconds(); result = getNearbyPointsForShingle(detector, filteredShingle, endTime, maxMillisecondsDifference) .map(e -> e.getValue().getValue().orElse(null)) .filter(d -> d != null) @@ -257,7 +257,7 @@ private Stream>>> getNearbyPointsForS long endTime, long maxMillisecondsDifference ) { - long intervalMilli = detector.getDetectorIntervalInMilliseconds(); + long intervalMilli = detector.getIntervalInMilliseconds(); int shingleSize = detector.getShingleSize(); TreeMap> search = new TreeMap<>( shingle.stream().collect(Collectors.toMap(Entry::getKey, Entry::getValue)) @@ -309,7 +309,7 @@ private void getColdStartSamples(Optional latest, AnomalyDetector detector new ThreadedActionListener<>(logger, threadPool, adThreadPoolName, getFeaturesListener, false) ); } catch (IOException e) { - listener.onFailure(new EndRunException(detector.getDetectorId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, true)); + listener.onFailure(new EndRunException(detector.getId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, true)); } } else { listener.onResponse(Optional.empty()); @@ -359,7 +359,7 @@ private Optional fillAndShingle(LinkedList> shingle } private List> getColdStartSampleRanges(AnomalyDetector detector, long endMillis) { - long interval = detector.getDetectorIntervalInMilliseconds(); + long interval = detector.getIntervalInMilliseconds(); int numSamples = Math.max((int) (Duration.ofHours(this.trainSampleTimeRangeInHours).toMillis() / interval), this.minTrainSamples); return IntStream .rangeClosed(1, numSamples) @@ -518,7 +518,7 @@ public void getPreviewFeatures(AnomalyDetector detector, long startMilli, long e private 
Entry>, Integer> getSampleRanges(AnomalyDetector detector, long startMilli, long endMilli) { long start = truncateToMinute(startMilli); long end = truncateToMinute(endMilli); - long bucketSize = detector.getDetectorIntervalInMilliseconds(); + long bucketSize = detector.getIntervalInMilliseconds(); int numBuckets = (int) Math.floor((end - start) / (double) bucketSize); int numSamples = (int) Math.max(Math.min(numBuckets * previewSampleRate, maxPreviewSamples), 1); int stride = (int) Math.max(1, Math.floor((double) numBuckets / numSamples)); @@ -658,7 +658,7 @@ public void getFeatureDataPointsByBatch( listener.onResponse(points); }, listener::onFailure)); } catch (Exception e) { - logger.error("Failed to get features for detector: " + detector.getDetectorId()); + logger.error("Failed to get features for detector: " + detector.getId()); listener.onFailure(e); } } diff --git a/src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java b/src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java index 73d8266ad..cf9f0912b 100644 --- a/src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java +++ b/src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java @@ -180,7 +180,7 @@ public void getLatestDataTime(AnomalyDetector detector, ActionListenerasyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -216,7 +216,7 @@ public void getHighestCountEntities( int pageSize, ActionListener> listener ) { - if (!detector.isMultientityDetector()) { + if (!detector.isHighCardinality()) { listener.onResponse(null); return; } @@ -231,8 +231,8 @@ public void getHighestCountEntities( BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery().filter(rangeQuery).filter(detector.getFilterQuery()); AggregationBuilder bucketAggs = null; - if (detector.getCategoryField().size() == 1) { - bucketAggs = 
AggregationBuilders.terms(AGG_NAME_TOP).size(maxEntitiesSize).field(detector.getCategoryField().get(0)); + if (detector.getCategoryFields().size() == 1) { + bucketAggs = AggregationBuilders.terms(AGG_NAME_TOP).size(maxEntitiesSize).field(detector.getCategoryFields().get(0)); } else { /* * We don't have an efficient solution for terms aggregation on multiple fields. @@ -328,7 +328,7 @@ public void getHighestCountEntities( bucketAggs = AggregationBuilders .composite( AGG_NAME_TOP, - detector.getCategoryField().stream().map(f -> new TermsValuesSourceBuilder(f).field(f)).collect(Collectors.toList()) + detector.getCategoryFields().stream().map(f -> new TermsValuesSourceBuilder(f).field(f)).collect(Collectors.toList()) ) .size(pageSize) .subAggregation( @@ -359,7 +359,7 @@ public void getHighestCountEntities( .asyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -410,14 +410,14 @@ public void onResponse(SearchResponse response) { return; } - if (detector.getCategoryField().size() == 1) { + if (detector.getCategoryFields().size() == 1) { topEntities = ((Terms) aggrResult) .getBuckets() .stream() .map(bucket -> bucket.getKeyAsString()) .collect(Collectors.toList()) .stream() - .map(entityValue -> Entity.createSingleAttributeEntity(detector.getCategoryField().get(0), entityValue)) + .map(entityValue -> Entity.createSingleAttributeEntity(detector.getCategoryFields().get(0), entityValue)) .collect(Collectors.toList()); listener.onResponse(topEntities); } else { @@ -451,7 +451,7 @@ public void onResponse(SearchResponse response) { .asyncRequestWithInjectedSecurity( new SearchRequest().indices(detector.getIndices().toArray(new String[0])).source(searchSourceBuilder), client::search, - detector.getDetectorId(), + detector.getId(), client, this ); @@ -495,7 +495,7 @@ public void getEntityMinDataTime(AnomalyDetector detector, Entity entity, Action .asyncRequestWithInjectedSecurity( 
searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -528,7 +528,7 @@ public void getFeaturesForPeriod(AnomalyDetector detector, long startTime, long .asyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -542,7 +542,7 @@ public void getFeaturesForPeriodByBatch( ActionListener>> listener ) throws IOException { SearchSourceBuilder searchSourceBuilder = batchFeatureQuery(detector, entity, startTime, endTime, xContent); - logger.debug("Batch query for detector {}: {} ", detector.getDetectorId(), searchSourceBuilder); + logger.debug("Batch query for detector {}: {} ", detector.getId(), searchSourceBuilder); SearchRequest searchRequest = new SearchRequest(detector.getIndices().toArray(new String[0])).source(searchSourceBuilder); final ActionListener searchResponseListener = ActionListener @@ -555,7 +555,7 @@ public void getFeaturesForPeriodByBatch( .asyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -618,7 +618,7 @@ public void getFeatureSamplesForPeriods( .asyncRequestWithInjectedSecurity( request, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -645,7 +645,7 @@ public void getFeaturesForSampledPeriods( ActionListener>> listener ) { Map cache = new HashMap<>(); - logger.info(String.format(Locale.ROOT, "Getting features for detector %s ending at %d", detector.getDetectorId(), endTime)); + logger.info(String.format(Locale.ROOT, "Getting features for detector %s ending at %d", detector.getId(), endTime)); getFeatureSamplesWithCache(detector, maxSamples, maxStride, endTime, cache, maxStride, listener); } @@ -699,7 +699,7 @@ private void processFeatureSamplesForStride( .format( Locale.ROOT, "Get features for detector %s finishes without any features present, current 
stride %d", - detector.getDetectorId(), + detector.getId(), currentStride ) ); @@ -711,7 +711,7 @@ private void processFeatureSamplesForStride( .format( Locale.ROOT, "Get features for detector %s finishes with %d samples, current stride %d", - detector.getDetectorId(), + detector.getId(), features.get().length, currentStride ) @@ -733,7 +733,7 @@ private void getFeatureSamplesForStride( ) { ArrayDeque sampledFeatures = new ArrayDeque<>(maxSamples); boolean isInterpolatable = currentStride < maxStride; - long span = ((IntervalTimeConfiguration) detector.getDetectionInterval()).toDuration().toMillis(); + long span = ((IntervalTimeConfiguration) detector.getInterval()).toDuration().toMillis(); sampleForIteration(detector, cache, maxSamples, endTime, span, currentStride, sampledFeatures, isInterpolatable, 0, listener); } @@ -838,11 +838,7 @@ private SearchRequest createFeatureSearchRequest(AnomalyDetector detector, long SearchSourceBuilder searchSourceBuilder = ParseUtils.generateInternalFeatureQuery(detector, startTime, endTime, xContent); return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder).preference(preference.orElse(null)); } catch (IOException e) { - logger - .warn( - "Failed to create feature search request for " + detector.getDetectorId() + " from " + startTime + " to " + endTime, - e - ); + logger.warn("Failed to create feature search request for " + detector.getId() + " from " + startTime + " to " + endTime, e); throw new IllegalStateException(e); } } @@ -852,7 +848,7 @@ private SearchRequest createPreviewSearchRequest(AnomalyDetector detector, List< SearchSourceBuilder searchSourceBuilder = ParseUtils.generatePreviewQuery(detector, ranges, xContent); return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); } catch (IOException e) { - logger.warn("Failed to create feature search request for " + detector.getDetectorId() + " for preview", e); + logger.warn("Failed to create feature 
search request for " + detector.getId() + " for preview", e); throw e; } } @@ -904,7 +900,7 @@ public void getColdStartSamplesForPeriods( .asyncRequestWithInjectedSecurity( request, client::search, - detector.getDetectorId(), + detector.getId(), client, searchResponseListener ); @@ -918,7 +914,7 @@ private SearchRequest createColdStartFeatureSearchRequest(AnomalyDetector detect logger .warn( "Failed to create cold start feature search request for " - + detector.getDetectorId() + + detector.getId() + " from " + ranges.get(0).getKey() + " to " diff --git a/src/main/java/org/opensearch/ad/ml/CheckpointDao.java b/src/main/java/org/opensearch/ad/ml/CheckpointDao.java index 538f7e568..0d070cf0d 100644 --- a/src/main/java/org/opensearch/ad/ml/CheckpointDao.java +++ b/src/main/java/org/opensearch/ad/ml/CheckpointDao.java @@ -305,7 +305,7 @@ public Map toIndexSource(ModelState modelState) thr ); return source; } - String detectorId = modelState.getDetectorId(); + String detectorId = modelState.getId(); source.put(DETECTOR_ID, detectorId); // we cannot pass Optional as OpenSearch does not know how to serialize an Optional value source.put(FIELD_MODELV2, serializedModel.get()); diff --git a/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java b/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java index 70795a982..036a2e28d 100644 --- a/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java +++ b/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java @@ -253,7 +253,7 @@ private void coldStart( return new DoorKeeper( TimeSeriesSettings.DOOR_KEEPER_FOR_COLD_STARTER_MAX_INSERTION, TimeSeriesSettings.DOOR_KEEPER_FALSE_POSITIVE_RATE, - detector.getDetectionIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ), + detector.getIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ), clock ); } @@ -467,7 +467,7 @@ private void getFeatures( long startTimeMs, long endTimeMs ) { - if (startTimeMs >= endTimeMs || endTimeMs 
- startTimeMs < detector.getDetectorIntervalInMilliseconds()) { + if (startTimeMs >= endTimeMs || endTimeMs - startTimeMs < detector.getIntervalInMilliseconds()) { listener.onResponse(Optional.of(lastRoundColdStartData)); return; } @@ -597,7 +597,7 @@ private int calculateColdStartDataSize(List coldStartData) { private Pair selectRangeParam(AnomalyDetector detector) { int shingleSize = detector.getShingleSize(); if (ADEnabledSetting.isInterpolationInColdStartEnabled()) { - long delta = detector.getDetectorIntervalInMinutes(); + long delta = detector.getIntervalInMinutes(); int strideLength = defaulStrideLength; int numberOfSamples = defaultNumberOfSamples; @@ -632,7 +632,7 @@ private List> getTrainSampleRanges( int stride, int numberOfSamples ) { - long bucketSize = ((IntervalTimeConfiguration) detector.getDetectionInterval()).toDuration().toMillis(); + long bucketSize = ((IntervalTimeConfiguration) detector.getInterval()).toDuration().toMillis(); int numBuckets = (int) Math.floor((endMilli - startMilli) / (double) bucketSize); // adjust if numStrides is more than the max samples int numStrides = Math.min((int) Math.floor(numBuckets / (double) stride), numberOfSamples); diff --git a/src/main/java/org/opensearch/ad/ml/ModelManager.java b/src/main/java/org/opensearch/ad/ml/ModelManager.java index 04a5b6341..045c720aa 100644 --- a/src/main/java/org/opensearch/ad/ml/ModelManager.java +++ b/src/main/java/org/opensearch/ad/ml/ModelManager.java @@ -534,7 +534,7 @@ private void trainModelForStep( .build(); Arrays.stream(dataPoints).forEach(s -> trcf.process(s, 0)); - String modelId = SingleStreamModelIdMapper.getRcfModelId(detector.getDetectorId(), step); + String modelId = SingleStreamModelIdMapper.getRcfModelId(detector.getId(), step); checkpointDao.putTRCFCheckpoint(modelId, trcf, ActionListener.wrap(r -> listener.onResponse(null), listener::onFailure)); } diff --git a/src/main/java/org/opensearch/ad/ml/ModelState.java 
b/src/main/java/org/opensearch/ad/ml/ModelState.java index ce9411147..9e909bc58 100644 --- a/src/main/java/org/opensearch/ad/ml/ModelState.java +++ b/src/main/java/org/opensearch/ad/ml/ModelState.java @@ -111,7 +111,7 @@ public String getModelId() { * * @return detectorId associated with the model */ - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/ml/ThresholdingResult.java b/src/main/java/org/opensearch/ad/ml/ThresholdingResult.java index 4d8f72876..a1fd6d52d 100644 --- a/src/main/java/org/opensearch/ad/ml/ThresholdingResult.java +++ b/src/main/java/org/opensearch/ad/ml/ThresholdingResult.java @@ -317,8 +317,8 @@ public AnomalyResult toAnomalyResult( ) { return AnomalyResult .fromRawTRCFResult( - detector.getDetectorId(), - detector.getDetectorIntervalInMilliseconds(), + detector.getId(), + detector.getIntervalInMilliseconds(), taskId, rcfScore, grade, diff --git a/src/main/java/org/opensearch/ad/model/ADTask.java b/src/main/java/org/opensearch/ad/model/ADTask.java index 1d3841c17..0bad4a4c2 100644 --- a/src/main/java/org/opensearch/ad/model/ADTask.java +++ b/src/main/java/org/opensearch/ad/model/ADTask.java @@ -572,15 +572,16 @@ public static ADTask parse(XContentParser parser, String taskId) throws IOExcept detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), detector.getSchemaVersion(), detector.getLastUpdateTime(), - detector.getCategoryField(), + detector.getCategoryFields(), detector.getUser(), - detector.getResultIndex() + detector.getCustomResultIndex(), + detector.getImputationOption() ); return new Builder() .taskId(parsedTaskId) @@ -623,7 +624,7 @@ public boolean equals(Object o) { && Objects.equal(getStoppedBy(), that.getStoppedBy()) && Objects.equal(getError(), that.getError()) && Objects.equal(getState(), 
that.getState()) - && Objects.equal(getDetectorId(), that.getDetectorId()) + && Objects.equal(getId(), that.getId()) && Objects.equal(getTaskProgress(), that.getTaskProgress()) && Objects.equal(getInitProgress(), that.getInitProgress()) && Objects.equal(getCurrentPiece(), that.getCurrentPiece()) @@ -709,7 +710,7 @@ public void setState(String state) { this.state = state; } - public String getDetectorId() { + public String getId() { return detectorId; } @@ -766,7 +767,7 @@ public Entity getEntity() { } public String getEntityModelId() { - return entity == null ? null : entity.getModelId(getDetectorId()).orElse(null); + return entity == null ? null : entity.getModelId(getId()).orElse(null); } public String getParentTaskId() { diff --git a/src/main/java/org/opensearch/ad/model/AnomalyDetector.java b/src/main/java/org/opensearch/ad/model/AnomalyDetector.java index 7bbf2cad1..008f21e4b 100644 --- a/src/main/java/org/opensearch/ad/model/AnomalyDetector.java +++ b/src/main/java/org/opensearch/ad/model/AnomalyDetector.java @@ -11,66 +11,54 @@ package org.opensearch.ad.model; -import static org.opensearch.ad.constant.ADCommonMessages.INVALID_RESULT_INDEX_PREFIX; import static org.opensearch.ad.constant.ADCommonName.CUSTOM_RESULT_INDEX_PREFIX; import static org.opensearch.ad.model.AnomalyDetectorType.MULTI_ENTITY; import static org.opensearch.ad.model.AnomalyDetectorType.SINGLE_ENTITY; import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; -import static org.opensearch.timeseries.constant.CommonMessages.INVALID_CHAR_IN_RESULT_INDEX_NAME; -import static org.opensearch.timeseries.settings.TimeSeriesSettings.DEFAULT_SHINGLE_SIZE; import java.io.IOException; -import java.time.Duration; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import 
org.apache.logging.log4j.util.Strings; import org.opensearch.ad.constant.ADCommonMessages; -import org.opensearch.ad.constant.CommonValue; import org.opensearch.ad.settings.ADNumericSetting; import org.opensearch.common.ParsingException; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; import org.opensearch.common.unit.TimeValue; import org.opensearch.commons.authuser.User; import org.opensearch.core.ParseField; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.ToXContent; -import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParseException; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; -import org.opensearch.timeseries.annotation.Generated; import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.constant.CommonMessages; -import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.constant.CommonValue; +import org.opensearch.timeseries.dataprocessor.ImputationOption; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.model.TimeConfiguration; import org.opensearch.timeseries.model.ValidationAspect; import org.opensearch.timeseries.model.ValidationIssueType; -import org.opensearch.timeseries.settings.TimeSeriesSettings; import org.opensearch.timeseries.util.ParseUtils; -import com.google.common.base.Objects; -import com.google.common.collect.ImmutableList; - /** * An AnomalyDetector is used to represent anomaly detection model(RCF) related parameters. 
* NOTE: If change detector config index mapping, you should change AD task index mapping as well. * TODO: Will replace detector config mapping in AD task with detector config setting directly \ * in code rather than config it in anomaly-detection-state.json file. */ -public class AnomalyDetector implements Writeable, ToXContentObject { +public class AnomalyDetector extends Config { public static final String PARSE_FIELD_NAME = "AnomalyDetector"; public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = new NamedXContentRegistry.Entry( @@ -78,59 +66,20 @@ public class AnomalyDetector implements Writeable, ToXContentObject { new ParseField(PARSE_FIELD_NAME), it -> parse(it) ); - public static final String NO_ID = ""; public static final String TYPE = "_doc"; - public static final String QUERY_PARAM_PERIOD_START = "period_start"; - public static final String QUERY_PARAM_PERIOD_END = "period_end"; - public static final String GENERAL_SETTINGS = "general_settings"; - - public static final String NAME_FIELD = "name"; - private static final String DESCRIPTION_FIELD = "description"; - public static final String TIMEFIELD_FIELD = "time_field"; - public static final String INDICES_FIELD = "indices"; - public static final String FILTER_QUERY_FIELD = "filter_query"; - public static final String FEATURE_ATTRIBUTES_FIELD = "feature_attributes"; + // for bwc, we have to keep this field instead of reusing an interval field in the super class. + // otherwise, we won't be able to recognize "detection_interval" field sent from old implementation. 
public static final String DETECTION_INTERVAL_FIELD = "detection_interval"; - public static final String WINDOW_DELAY_FIELD = "window_delay"; - public static final String SHINGLE_SIZE_FIELD = "shingle_size"; - private static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; - public static final String UI_METADATA_FIELD = "ui_metadata"; - public static final String CATEGORY_FIELD = "category_field"; - public static final String USER_FIELD = "user"; public static final String DETECTOR_TYPE_FIELD = "detector_type"; - public static final String RESULT_INDEX_FIELD = "result_index"; - public static final String AGGREGATION = "aggregation_issue"; - public static final String TIMEOUT = "timeout"; @Deprecated public static final String DETECTION_DATE_RANGE_FIELD = "detection_date_range"; - private final String detectorId; - private final Long version; - private final String name; - private final String description; - private final String timeField; - private final List indices; - private final List featureAttributes; - private final QueryBuilder filterQuery; - private final TimeConfiguration detectionInterval; - private final TimeConfiguration windowDelay; - private final Integer shingleSize; - private final Map uiMetadata; - private final Integer schemaVersion; - private final Instant lastUpdateTime; - private final List categoryFields; - private User user; - private String detectorType; - private String resultIndex; + protected String detectorType; // TODO: support backward compatibility, will remove in future @Deprecated private DateRange detectionDateRange; - public static final int MAX_RESULT_INDEX_NAME_SIZE = 255; - // OS doesn’t allow uppercase: https://tinyurl.com/yse2xdbx - public static final String RESULT_INDEX_NAME_PATTERN = "[a-z0-9_-]+"; - public static String INVALID_RESULT_INDEX_NAME_SIZE = "Result index name size must contains less than " + MAX_RESULT_INDEX_NAME_SIZE + " characters"; @@ -155,6 +104,7 @@ public class AnomalyDetector implements 
Writeable, ToXContentObject { * @param categoryFields a list of partition fields * @param user user to which detector is associated * @param resultIndex result index + * @param imputationOption interpolation method and optional default values */ public AnomalyDetector( String detectorId, @@ -173,88 +123,58 @@ public AnomalyDetector( Instant lastUpdateTime, List categoryFields, User user, - String resultIndex + String resultIndex, + ImputationOption imputationOption ) { - if (Strings.isBlank(name)) { - throw new ValidationException(CommonMessages.EMPTY_NAME, ValidationIssueType.NAME, ValidationAspect.DETECTOR); - } - if (Strings.isBlank(timeField)) { - throw new ValidationException(CommonMessages.NULL_TIME_FIELD, ValidationIssueType.TIMEFIELD_FIELD, ValidationAspect.DETECTOR); - } - if (indices == null || indices.isEmpty()) { - throw new ValidationException(CommonMessages.EMPTY_INDICES, ValidationIssueType.INDICES, ValidationAspect.DETECTOR); - } + super( + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + detectionInterval, + imputationOption + ); + + checkAndThrowValidationErrors(ValidationAspect.DETECTOR); + if (detectionInterval == null) { - throw new ValidationException( - ADCommonMessages.NULL_DETECTION_INTERVAL, - ValidationIssueType.DETECTION_INTERVAL, - ValidationAspect.DETECTOR - ); - } - if (invalidShingleSizeRange(shingleSize)) { - throw new ValidationException( - "Shingle size must be a positive integer no larger than " + TimeSeriesSettings.MAX_SHINGLE_SIZE + ". 
Got " + shingleSize, - ValidationIssueType.SHINGLE_SIZE_FIELD, - ValidationAspect.DETECTOR - ); + errorMessage = ADCommonMessages.NULL_DETECTION_INTERVAL; + issueType = ValidationIssueType.DETECTION_INTERVAL; + } else if (((IntervalTimeConfiguration) detectionInterval).getInterval() <= 0) { + errorMessage = ADCommonMessages.INVALID_DETECTION_INTERVAL; + issueType = ValidationIssueType.DETECTION_INTERVAL; } + int maxCategoryFields = ADNumericSetting.maxCategoricalFields(); if (categoryFields != null && categoryFields.size() > maxCategoryFields) { - throw new ValidationException( - CommonMessages.getTooManyCategoricalFieldErr(maxCategoryFields), - ValidationIssueType.CATEGORY, - ValidationAspect.DETECTOR - ); - } - if (((IntervalTimeConfiguration) detectionInterval).getInterval() <= 0) { - throw new ValidationException( - ADCommonMessages.INVALID_DETECTION_INTERVAL, - ValidationIssueType.DETECTION_INTERVAL, - ValidationAspect.DETECTOR - ); + errorMessage = CommonMessages.getTooManyCategoricalFieldErr(maxCategoryFields); + issueType = ValidationIssueType.CATEGORY; } - this.detectorId = detectorId; - this.version = version; - this.name = name; - this.description = description; - this.timeField = timeField; - this.indices = indices; - this.featureAttributes = features == null ? ImmutableList.of() : ImmutableList.copyOf(features); - this.filterQuery = filterQuery; - this.detectionInterval = detectionInterval; - this.windowDelay = windowDelay; - this.shingleSize = getShingleSize(shingleSize); - this.uiMetadata = uiMetadata; - this.schemaVersion = schemaVersion; - this.lastUpdateTime = lastUpdateTime; - this.categoryFields = categoryFields; - this.user = user; - this.detectorType = isMultientityDetector(categoryFields) ? 
MULTI_ENTITY.name() : SINGLE_ENTITY.name(); - this.resultIndex = Strings.trimToNull(resultIndex); - String errorMessage = validateResultIndex(this.resultIndex); - if (errorMessage != null) { - throw new ValidationException(errorMessage, ValidationIssueType.RESULT_INDEX, ValidationAspect.DETECTOR); - } - } - public static String validateResultIndex(String resultIndex) { - if (resultIndex == null) { - return null; - } - if (!resultIndex.startsWith(CUSTOM_RESULT_INDEX_PREFIX)) { - return INVALID_RESULT_INDEX_PREFIX; - } - if (resultIndex.length() > MAX_RESULT_INDEX_NAME_SIZE) { - return INVALID_RESULT_INDEX_NAME_SIZE; - } - if (!resultIndex.matches(RESULT_INDEX_NAME_PATTERN)) { - return INVALID_CHAR_IN_RESULT_INDEX_NAME; - } - return null; + checkAndThrowValidationErrors(ValidationAspect.DETECTOR); + + this.detectorType = isHC(categoryFields) ? MULTI_ENTITY.name() : SINGLE_ENTITY.name(); } + /* + * For backward compatiblity reason, we cannot use super class + * Config's constructor as we have detectionDateRange and + * detectorType that Config does not have. 
+ */ public AnomalyDetector(StreamInput input) throws IOException { - detectorId = input.readOptionalString(); + id = input.readOptionalString(); version = input.readOptionalLong(); name = input.readString(); description = input.readOptionalString(); @@ -262,7 +182,7 @@ public AnomalyDetector(StreamInput input) throws IOException { indices = input.readStringList(); featureAttributes = input.readList(Feature::new); filterQuery = input.readNamedWriteable(QueryBuilder.class); - detectionInterval = IntervalTimeConfiguration.readFrom(input); + interval = IntervalTimeConfiguration.readFrom(input); windowDelay = IntervalTimeConfiguration.readFrom(input); shingleSize = input.readInt(); schemaVersion = input.readInt(); @@ -284,16 +204,27 @@ public AnomalyDetector(StreamInput input) throws IOException { } else { this.uiMetadata = null; } - resultIndex = input.readOptionalString(); + customResultIndex = input.readOptionalString(); + if (input.readBoolean()) { + this.imputationOption = new ImputationOption(input); + } else { + this.imputationOption = null; + } + this.imputer = createImputer(); } public XContentBuilder toXContent(XContentBuilder builder) throws IOException { return toXContent(builder, ToXContent.EMPTY_PARAMS); } + /* + * For backward compatiblity reason, we cannot use super class + * Config's writeTo as we have detectionDateRange and + * detectorType that Config does not have. 
+ */ @Override public void writeTo(StreamOutput output) throws IOException { - output.writeOptionalString(detectorId); + output.writeOptionalString(id); output.writeOptionalLong(version); output.writeString(name); output.writeOptionalString(description); @@ -301,7 +232,7 @@ public void writeTo(StreamOutput output) throws IOException { output.writeStringCollection(indices); output.writeList(featureAttributes); output.writeNamedWriteable(filterQuery); - detectionInterval.writeTo(output); + interval.writeTo(output); windowDelay.writeTo(output); output.writeInt(shingleSize); output.writeInt(schemaVersion); @@ -326,45 +257,28 @@ public void writeTo(StreamOutput output) throws IOException { } else { output.writeBoolean(false); } - output.writeOptionalString(resultIndex); + output.writeOptionalString(customResultIndex); + if (imputationOption != null) { + output.writeBoolean(true); + imputationOption.writeTo(output); + } else { + output.writeBoolean(false); + } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - XContentBuilder xContentBuilder = builder - .startObject() - .field(NAME_FIELD, name) - .field(DESCRIPTION_FIELD, description) - .field(TIMEFIELD_FIELD, timeField) - .field(INDICES_FIELD, indices.toArray()) - .field(FILTER_QUERY_FIELD, filterQuery) - .field(DETECTION_INTERVAL_FIELD, detectionInterval) - .field(WINDOW_DELAY_FIELD, windowDelay) - .field(SHINGLE_SIZE_FIELD, shingleSize) - .field(CommonName.SCHEMA_VERSION_FIELD, schemaVersion) - .field(FEATURE_ATTRIBUTES_FIELD, featureAttributes.toArray()); - - if (uiMetadata != null && !uiMetadata.isEmpty()) { - xContentBuilder.field(UI_METADATA_FIELD, uiMetadata); - } - if (lastUpdateTime != null) { - xContentBuilder.field(LAST_UPDATE_TIME_FIELD, lastUpdateTime.toEpochMilli()); - } - if (categoryFields != null) { - xContentBuilder.field(CATEGORY_FIELD, categoryFields.toArray()); - } - if (user != null) { - xContentBuilder.field(USER_FIELD, user); - } + 
XContentBuilder xContentBuilder = builder.startObject(); + xContentBuilder = super.toXContent(xContentBuilder, params); + xContentBuilder.field(DETECTION_INTERVAL_FIELD, interval); + if (detectorType != null) { xContentBuilder.field(DETECTOR_TYPE_FIELD, detectorType); } if (detectionDateRange != null) { xContentBuilder.field(DETECTION_DATE_RANGE_FIELD, detectionDateRange); } - if (resultIndex != null) { - xContentBuilder.field(RESULT_INDEX_FIELD, resultIndex); - } + return xContentBuilder.endObject(); } @@ -435,6 +349,7 @@ public static AnomalyDetector parse( String resultIndex = null; List categoryField = null; + ImputationOption imputationOption = null; ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { @@ -460,7 +375,7 @@ public static AnomalyDetector parse( case UI_METADATA_FIELD: uiMetadata = parser.map(); break; - case CommonName.SCHEMA_VERSION_FIELD: + case org.opensearch.timeseries.constant.CommonName.SCHEMA_VERSION_FIELD: schemaVersion = parser.intValue(); break; case FILTER_QUERY_FIELD: @@ -542,6 +457,9 @@ public static AnomalyDetector parse( case RESULT_INDEX_FIELD: resultIndex = parser.text(); break; + case IMPUTATION_OPTION_FIELD: + imputationOption = ImputationOption.parse(parser); + break; default: parser.skipChildren(); break; @@ -564,168 +482,13 @@ public static AnomalyDetector parse( lastUpdateTime, categoryField, user, - resultIndex + resultIndex, + imputationOption ); detector.setDetectionDateRange(detectionDateRange); return detector; } - @Generated - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - AnomalyDetector detector = (AnomalyDetector) o; - return Objects.equal(getName(), detector.getName()) - && Objects.equal(getDescription(), detector.getDescription()) - && Objects.equal(getTimeField(), detector.getTimeField()) - && Objects.equal(getIndices(), 
detector.getIndices()) - && Objects.equal(getFeatureAttributes(), detector.getFeatureAttributes()) - && Objects.equal(getFilterQuery(), detector.getFilterQuery()) - && Objects.equal(getDetectionInterval(), detector.getDetectionInterval()) - && Objects.equal(getWindowDelay(), detector.getWindowDelay()) - && Objects.equal(getShingleSize(), detector.getShingleSize()) - && Objects.equal(getCategoryField(), detector.getCategoryField()) - && Objects.equal(getUser(), detector.getUser()) - && Objects.equal(getResultIndex(), detector.getResultIndex()); - } - - @Generated - @Override - public int hashCode() { - return Objects - .hashCode( - detectorId, - name, - description, - timeField, - indices, - featureAttributes, - detectionInterval, - windowDelay, - shingleSize, - uiMetadata, - schemaVersion, - lastUpdateTime, - user, - detectorType, - resultIndex - ); - } - - public String getDetectorId() { - return detectorId; - } - - public Long getVersion() { - return version; - } - - public String getName() { - return name; - } - - public String getDescription() { - return description; - } - - public String getTimeField() { - return timeField; - } - - public List getIndices() { - return indices; - } - - public List getFeatureAttributes() { - return featureAttributes; - } - - public QueryBuilder getFilterQuery() { - return filterQuery; - } - - /** - * Returns enabled feature ids in the same order in feature attributes. - * - * @return a list of filtered feature ids. 
- */ - public List getEnabledFeatureIds() { - return featureAttributes.stream().filter(Feature::getEnabled).map(Feature::getId).collect(Collectors.toList()); - } - - public List getEnabledFeatureNames() { - return featureAttributes.stream().filter(Feature::getEnabled).map(Feature::getName).collect(Collectors.toList()); - } - - public TimeConfiguration getDetectionInterval() { - return detectionInterval; - } - - public TimeConfiguration getWindowDelay() { - return windowDelay; - } - - public Integer getShingleSize() { - return shingleSize; - } - - /** - * If the given shingle size is null, return default based on the kind of detector; - * otherwise, return the given shingle size. - * - * TODO: need to deal with the case where customers start with single-entity detector, we set it to 8 by default; - * then cx update it to multi-entity detector, we would still use 8 in this case. Kibana needs to change to - * give the correct shingle size. - * @param customShingleSize Given shingle size - * @return Shingle size - */ - private static Integer getShingleSize(Integer customShingleSize) { - return customShingleSize == null ? 
DEFAULT_SHINGLE_SIZE : customShingleSize; - } - - public Map getUiMetadata() { - return uiMetadata; - } - - public Integer getSchemaVersion() { - return schemaVersion; - } - - public Instant getLastUpdateTime() { - return lastUpdateTime; - } - - public List getCategoryField() { - return this.categoryFields; - } - - public long getDetectorIntervalInMilliseconds() { - return ((IntervalTimeConfiguration) getDetectionInterval()).toDuration().toMillis(); - } - - public long getDetectorIntervalInSeconds() { - return getDetectorIntervalInMilliseconds() / 1000; - } - - public long getDetectorIntervalInMinutes() { - return getDetectorIntervalInMilliseconds() / 1000 / 60; - } - - public Duration getDetectionIntervalDuration() { - return ((IntervalTimeConfiguration) getDetectionInterval()).toDuration(); - } - - public User getUser() { - return user; - } - - public void setUser(User user) { - this.user = user; - } - public String getDetectorType() { return detectorType; } @@ -738,23 +501,16 @@ public DateRange getDetectionDateRange() { return detectionDateRange; } - public String getResultIndex() { - return resultIndex; - } - - public boolean isMultientityDetector() { - return AnomalyDetector.isMultientityDetector(getCategoryField()); - } - - public boolean isMultiCategoryDetector() { - return categoryFields != null && categoryFields.size() > 1; - } - - private static boolean isMultientityDetector(List categoryFields) { - return categoryFields != null && categoryFields.size() > 0; + @Override + protected ValidationAspect getConfigValidationAspect() { + return ValidationAspect.DETECTOR; } - public boolean invalidShingleSizeRange(Integer shingleSizeToTest) { - return shingleSizeToTest != null && (shingleSizeToTest < 1 || shingleSizeToTest > TimeSeriesSettings.MAX_SHINGLE_SIZE); + @Override + public String validateCustomResultIndex(String resultIndex) { + if (resultIndex != null && !resultIndex.startsWith(CUSTOM_RESULT_INDEX_PREFIX)) { + return 
ADCommonMessages.INVALID_RESULT_INDEX_PREFIX; + } + return super.validateCustomResultIndex(resultIndex); } } diff --git a/src/main/java/org/opensearch/ad/model/AnomalyDetectorExecutionInput.java b/src/main/java/org/opensearch/ad/model/AnomalyDetectorExecutionInput.java index 963114b7a..a8af32bb8 100644 --- a/src/main/java/org/opensearch/ad/model/AnomalyDetectorExecutionInput.java +++ b/src/main/java/org/opensearch/ad/model/AnomalyDetectorExecutionInput.java @@ -83,7 +83,6 @@ public static AnomalyDetectorExecutionInput parse(XContentParser parser, String periodEnd = ParseUtils.toInstant(parser); break; case DETECTOR_FIELD: - XContentParser.Token token = parser.currentToken(); if (parser.currentToken().equals(XContentParser.Token.START_OBJECT)) { detector = AnomalyDetector.parse(parser, detectorId); } diff --git a/src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java b/src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java index 48e72daf6..5ff9b07ef 100644 --- a/src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java +++ b/src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java @@ -250,7 +250,7 @@ public boolean equals(Object o) { && Objects.equal(getDisabledTime(), that.getDisabledTime()) && Objects.equal(getLastUpdateTime(), that.getLastUpdateTime()) && Objects.equal(getLockDurationSeconds(), that.getLockDurationSeconds()) - && Objects.equal(getResultIndex(), that.getResultIndex()); + && Objects.equal(getCustomResultIndex(), that.getCustomResultIndex()); } @Override @@ -300,7 +300,7 @@ public User getUser() { return user; } - public String getResultIndex() { + public String getCustomResultIndex() { return resultIndex; } } diff --git a/src/main/java/org/opensearch/ad/model/AnomalyResult.java b/src/main/java/org/opensearch/ad/model/AnomalyResult.java index 060414c68..1b34a9cbc 100644 --- a/src/main/java/org/opensearch/ad/model/AnomalyResult.java +++ b/src/main/java/org/opensearch/ad/model/AnomalyResult.java @@ -788,7 +788,7 @@ public String 
toString() { .toString(); } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/model/DetectorProfileName.java b/src/main/java/org/opensearch/ad/model/DetectorProfileName.java index 7df864a51..443066ac8 100644 --- a/src/main/java/org/opensearch/ad/model/DetectorProfileName.java +++ b/src/main/java/org/opensearch/ad/model/DetectorProfileName.java @@ -14,9 +14,9 @@ import java.util.Collection; import java.util.Set; -import org.opensearch.ad.Name; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; +import org.opensearch.timeseries.Name; public enum DetectorProfileName implements Name { STATE(ADCommonName.STATE), diff --git a/src/main/java/org/opensearch/ad/model/EntityProfileName.java b/src/main/java/org/opensearch/ad/model/EntityProfileName.java index 8a1c9ed74..84fd92987 100644 --- a/src/main/java/org/opensearch/ad/model/EntityProfileName.java +++ b/src/main/java/org/opensearch/ad/model/EntityProfileName.java @@ -14,9 +14,9 @@ import java.util.Collection; import java.util.Set; -import org.opensearch.ad.Name; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; +import org.opensearch.timeseries.Name; public enum EntityProfileName implements Name { INIT_PROGRESS(ADCommonName.INIT_PROGRESS), diff --git a/src/main/java/org/opensearch/ad/ratelimit/CheckPointMaintainRequestAdapter.java b/src/main/java/org/opensearch/ad/ratelimit/CheckPointMaintainRequestAdapter.java index 072855069..91382a4b5 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/CheckPointMaintainRequestAdapter.java +++ b/src/main/java/org/opensearch/ad/ratelimit/CheckPointMaintainRequestAdapter.java @@ -62,7 +62,7 @@ public CheckPointMaintainRequestAdapter( } public Optional convert(CheckpointMaintainRequest request) { - String detectorId = request.getDetectorId(); + String detectorId = request.getId(); String modelId = 
request.getEntityModelId(); Optional> stateToMaintain = cache.get().getForMaintainance(detectorId, modelId); diff --git a/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java b/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java index 9995b7170..bc24e2500 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java @@ -292,7 +292,7 @@ private void processCheckpointIteration( return; } - String detectorId = origRequest.getDetectorId(); + String detectorId = origRequest.getId(); Entity entity = origRequest.getEntity(); String modelId = modelIdOptional.get(); @@ -390,7 +390,7 @@ private ActionListener> onGetDetector( .toAnomalyResult( detector, Instant.ofEpochMilli(origRequest.getDataStartTimeMillis()), - Instant.ofEpochMilli(origRequest.getDataStartTimeMillis() + detector.getDetectorIntervalInMilliseconds()), + Instant.ofEpochMilli(origRequest.getDataStartTimeMillis() + detector.getIntervalInMilliseconds()), Instant.now(), Instant.now(), ParseUtils.getFeatureData(origRequest.getCurrentFeature(), detector), @@ -408,7 +408,7 @@ private ActionListener> onGetDetector( detectorId, result.getGrade() > 0 ? 
RequestPriority.HIGH : RequestPriority.MEDIUM, resultToSave, - detector.getResultIndex() + detector.getCustomResultIndex() ) ); } diff --git a/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java b/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java index 19f9a3287..dd32e21c4 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java @@ -131,7 +131,7 @@ protected ActionListener getResponseListener(List modelState, boolean forceWrite, Reques } if (modelState.getModel() != null) { - String detectorId = modelState.getDetectorId(); + String detectorId = modelState.getId(); String modelId = modelState.getModelId(); if (modelId == null || detectorId == null) { return; @@ -190,7 +190,7 @@ private ActionListener> onGetDetector( modelState.setLastCheckpointTime(clock.instant()); CheckpointWriteRequest request = new CheckpointWriteRequest( - System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(), + System.currentTimeMillis() + detector.getIntervalInMilliseconds(), detectorId, priority, // If the document does not already exist, the contents of the upsert element @@ -243,7 +243,7 @@ public void writeAll(List> modelStates, String detectorI allRequests .add( new CheckpointWriteRequest( - System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(), + System.currentTimeMillis() + detector.getIntervalInMilliseconds(), detectorId, priority, // If the document does not already exist, the contents of the upsert element diff --git a/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java b/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java index 8702fafcc..53d05ff11 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java @@ -100,7 +100,7 @@ public EntityColdStartWorker( @Override protected void 
executeRequest(EntityRequest coldStartRequest, ActionListener listener) { - String detectorId = coldStartRequest.getDetectorId(); + String detectorId = coldStartRequest.getId(); Optional modelId = coldStartRequest.getModelId(); diff --git a/src/main/java/org/opensearch/ad/ratelimit/QueuedRequest.java b/src/main/java/org/opensearch/ad/ratelimit/QueuedRequest.java index 4108e96d6..66c440db9 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/QueuedRequest.java +++ b/src/main/java/org/opensearch/ad/ratelimit/QueuedRequest.java @@ -47,7 +47,7 @@ public void setPriority(RequestPriority priority) { this.priority = priority; } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/ratelimit/RateLimitedRequestWorker.java b/src/main/java/org/opensearch/ad/ratelimit/RateLimitedRequestWorker.java index fd312376b..1eea96337 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/RateLimitedRequestWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/RateLimitedRequestWorker.java @@ -305,7 +305,7 @@ protected void putOnly(RequestType request) { // just use the RequestQueue priority (i.e., low or high) as the key of the RequestQueue map. RequestQueue requestQueue = requestQueues .computeIfAbsent( - RequestPriority.MEDIUM == request.getPriority() ? request.getDetectorId() : request.getPriority().name(), + RequestPriority.MEDIUM == request.getPriority() ? 
request.getId() : request.getPriority().name(), k -> new RequestQueue() ); @@ -559,7 +559,7 @@ protected void process() { LOG.error(String.format(Locale.ROOT, "Failed to process requests from %s", getWorkerName()), e); if (e != null && e instanceof TimeSeriesException) { TimeSeriesException adExep = (TimeSeriesException) e; - nodeStateManager.setException(adExep.getAnomalyDetectorId(), adExep); + nodeStateManager.setException(adExep.getConfigId(), adExep); } } diff --git a/src/main/java/org/opensearch/ad/ratelimit/ResultWriteRequest.java b/src/main/java/org/opensearch/ad/ratelimit/ResultWriteRequest.java index f104b1164..7acef66a7 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/ResultWriteRequest.java +++ b/src/main/java/org/opensearch/ad/ratelimit/ResultWriteRequest.java @@ -50,7 +50,7 @@ public AnomalyResult getResult() { return result; } - public String getResultIndex() { + public String getCustomResultIndex() { return resultIndex; } } diff --git a/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java b/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java index fac9a49f1..0aa94c35a 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java @@ -137,7 +137,7 @@ protected ActionListener getResponseListener( } for (ResultWriteRequest request : toProcess) { - nodeStateManager.setException(request.getDetectorId(), exception); + nodeStateManager.setException(request.getId(), exception); } LOG.error("Fail to save results", exception); }); @@ -154,7 +154,7 @@ private void enqueueRetryRequestIteration(List requestToRetry, int return; } AnomalyResult result = resultToRetry.get(); - String detectorId = result.getDetectorId(); + String detectorId = result.getId(); nodeStateManager.getAnomalyDetector(detectorId, onGetDetector(requestToRetry, index, detectorId, result)); } @@ -175,11 +175,11 @@ private ActionListener> onGetDetector( super.put( new 
ResultWriteRequest( // expire based on execute start time - resultToRetry.getExecutionStartTime().toEpochMilli() + detector.getDetectorIntervalInMilliseconds(), + resultToRetry.getExecutionStartTime().toEpochMilli() + detector.getIntervalInMilliseconds(), detectorId, resultToRetry.isHighPriority() ? RequestPriority.HIGH : RequestPriority.MEDIUM, resultToRetry, - detector.getResultIndex() + detector.getCustomResultIndex() ) ); diff --git a/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java index e37ef4514..ab93e0579 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java @@ -248,7 +248,7 @@ public AbstractAnomalyDetectorActionHandler( * mapping. */ public void start() { - String resultIndex = anomalyDetector.getResultIndex(); + String resultIndex = anomalyDetector.getCustomResultIndex(); // use default detector result index which is system index if (resultIndex == null) { createOrUpdateDetector(); @@ -427,11 +427,11 @@ private void onGetAnomalyDetectorResponse(GetResponse response, boolean indexing // If single-category HC changed category field from IP to error type, the AD result page may show both IP and error type // in top N entities list. That's confusing. // So we decide to block updating detector category field. 
- if (!listEqualsWithoutConsideringOrder(existingDetector.getCategoryField(), anomalyDetector.getCategoryField())) { + if (!listEqualsWithoutConsideringOrder(existingDetector.getCategoryFields(), anomalyDetector.getCategoryFields())) { listener.onFailure(new OpenSearchStatusException(CommonMessages.CAN_NOT_CHANGE_CATEGORY_FIELD, RestStatus.BAD_REQUEST)); return; } - if (!Objects.equals(existingDetector.getResultIndex(), anomalyDetector.getResultIndex())) { + if (!Objects.equals(existingDetector.getCustomResultIndex(), anomalyDetector.getCustomResultIndex())) { listener .onFailure(new OpenSearchStatusException(CommonMessages.CAN_NOT_CHANGE_CUSTOM_RESULT_INDEX, RestStatus.BAD_REQUEST)); return; @@ -462,7 +462,7 @@ protected void validateExistingDetector(AnomalyDetector existingDetector, boolea } protected boolean hasCategoryField(AnomalyDetector detector) { - return detector.getCategoryField() != null && !detector.getCategoryField().isEmpty(); + return detector.getCategoryFields() != null && !detector.getCategoryFields().isEmpty(); } protected void validateAgainstExistingMultiEntityAnomalyDetector(String detectorId, boolean indexingDryRun) { @@ -489,7 +489,7 @@ protected void validateAgainstExistingMultiEntityAnomalyDetector(String detector protected void createAnomalyDetector(boolean indexingDryRun) { try { - List categoricalFields = anomalyDetector.getCategoryField(); + List categoricalFields = anomalyDetector.getCategoryFields(); if (categoricalFields != null && categoricalFields.size() > 0) { validateAgainstExistingMultiEntityAnomalyDetector(null, indexingDryRun); } else { @@ -552,7 +552,7 @@ protected void onSearchMultiEntityAdResponse(SearchResponse response, String det @SuppressWarnings("unchecked") protected void validateCategoricalField(String detectorId, boolean indexingDryRun) { - List categoryField = anomalyDetector.getCategoryField(); + List categoryField = anomalyDetector.getCategoryFields(); if (categoryField == null) { searchAdInputIndices(detectorId, 
indexingDryRun); @@ -770,7 +770,7 @@ protected void finishDetectorValidationOrContinueToModelValidation() { @SuppressWarnings("unchecked") protected void indexAnomalyDetector(String detectorId) throws IOException { AnomalyDetector detector = new AnomalyDetector( - anomalyDetector.getDetectorId(), + anomalyDetector.getId(), anomalyDetector.getVersion(), anomalyDetector.getName(), anomalyDetector.getDescription(), @@ -778,15 +778,16 @@ protected void indexAnomalyDetector(String detectorId) throws IOException { anomalyDetector.getIndices(), anomalyDetector.getFeatureAttributes(), anomalyDetector.getFilterQuery(), - anomalyDetector.getDetectionInterval(), + anomalyDetector.getInterval(), anomalyDetector.getWindowDelay(), anomalyDetector.getShingleSize(), anomalyDetector.getUiMetadata(), anomalyDetector.getSchemaVersion(), Instant.now(), - anomalyDetector.getCategoryField(), + anomalyDetector.getCategoryFields(), user, - anomalyDetector.getResultIndex() + anomalyDetector.getCustomResultIndex(), + anomalyDetector.getImputationOption() ); IndexRequest indexRequest = new IndexRequest(CommonName.CONFIG_INDEX) .setRefreshPolicy(refreshPolicy) diff --git a/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java index c35f2ee5b..f83021169 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java @@ -126,10 +126,10 @@ public void startAnomalyDetectorJob(AnomalyDetector detector, ActionListener startListener = ActionListener.wrap(r -> { try { Instant executionEndTime = Instant.now(); - IntervalTimeConfiguration schedule = (IntervalTimeConfiguration) detector.getDetectionInterval(); + IntervalTimeConfiguration schedule = (IntervalTimeConfiguration) detector.getInterval(); Instant executionStartTime = 
executionEndTime.minus(schedule.getInterval(), schedule.getUnit()); AnomalyResultRequest getRequest = new AnomalyResultRequest( - detector.getDetectorId(), + detector.getId(), executionStartTime.toEpochMilli(), executionEndTime.toEpochMilli() ); @@ -183,12 +183,12 @@ public void startAnomalyDetectorJob(AnomalyDetector detector, ActionListener listener) { try { - IntervalTimeConfiguration interval = (IntervalTimeConfiguration) detector.getDetectionInterval(); + IntervalTimeConfiguration interval = (IntervalTimeConfiguration) detector.getInterval(); Schedule schedule = new IntervalSchedule(Instant.now(), (int) interval.getInterval(), interval.getUnit()); Duration duration = Duration.of(interval.getInterval(), interval.getUnit()); AnomalyDetectorJob job = new AnomalyDetectorJob( - detector.getDetectorId(), + detector.getId(), schedule, detector.getWindowDelay(), true, @@ -197,7 +197,7 @@ private void createJob(AnomalyDetector detector, ActionListener> topEntityListener) { // Look at data back to the lower bound given the max interval we recommend or one given - long maxIntervalInMinutes = Math.max(MAX_INTERVAL_REC_LENGTH_IN_MINUTES, anomalyDetector.getDetectorIntervalInMinutes()); + long maxIntervalInMinutes = Math.max(MAX_INTERVAL_REC_LENGTH_IN_MINUTES, anomalyDetector.getIntervalInMinutes()); LongBounds timeRangeBounds = getTimeRangeBounds( Instant.now().toEpochMilli(), new IntervalTimeConfiguration(maxIntervalInMinutes, ChronoUnit.MINUTES) @@ -181,17 +181,17 @@ private void getTopEntity(ActionListener> topEntityListener) .to(timeRangeBounds.getMax()); AggregationBuilder bucketAggs; Map topKeys = new HashMap<>(); - if (anomalyDetector.getCategoryField().size() == 1) { + if (anomalyDetector.getCategoryFields().size() == 1) { bucketAggs = AggregationBuilders .terms(AGG_NAME_TOP) - .field(anomalyDetector.getCategoryField().get(0)) + .field(anomalyDetector.getCategoryFields().get(0)) .order(BucketOrder.count(true)); } else { bucketAggs = AggregationBuilders .composite( 
AGG_NAME_TOP, anomalyDetector - .getCategoryField() + .getCategoryFields() .stream() .map(f -> new TermsValuesSourceBuilder(f).field(f)) .collect(Collectors.toList()) @@ -217,7 +217,7 @@ private void getTopEntity(ActionListener> topEntityListener) topEntityListener.onResponse(Collections.emptyMap()); return; } - if (anomalyDetector.getCategoryField().size() == 1) { + if (anomalyDetector.getCategoryFields().size() == 1) { Terms entities = aggs.get(AGG_NAME_TOP); Object key = entities .getBuckets() @@ -225,7 +225,7 @@ private void getTopEntity(ActionListener> topEntityListener) .max(Comparator.comparingInt(entry -> (int) entry.getDocCount())) .map(MultiBucketsAggregation.Bucket::getKeyAsString) .orElse(null); - topKeys.put(anomalyDetector.getCategoryField().get(0), key); + topKeys.put(anomalyDetector.getCategoryFields().get(0), key); } else { CompositeAggregation compositeAgg = aggs.get(AGG_NAME_TOP); topKeys @@ -287,7 +287,7 @@ private void getSampleRangesForValidationChecks( try { getBucketAggregates(timeRangeEnd, listener, topEntity); } catch (IOException e) { - listener.onFailure(new EndRunException(detector.getDetectorId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, true)); + listener.onFailure(new EndRunException(detector.getId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, true)); } } @@ -296,12 +296,9 @@ private void getBucketAggregates( ActionListener listener, Map topEntity ) throws IOException { - AggregationBuilder aggregation = getBucketAggregation( - latestTime, - (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval() - ); + AggregationBuilder aggregation = getBucketAggregation(latestTime, (IntervalTimeConfiguration) anomalyDetector.getInterval()); BoolQueryBuilder query = QueryBuilders.boolQuery().filter(anomalyDetector.getFilterQuery()); - if (anomalyDetector.isMultientityDetector()) { + if (anomalyDetector.isHighCardinality()) { if (topEntity.isEmpty()) { listener .onFailure( @@ -333,7 +330,7 @@ private void getBucketAggregates( new 
ModelValidationActionHandler.DetectorIntervalRecommendationListener( intervalListener, searchRequest.source(), - (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval(), + (IntervalTimeConfiguration) anomalyDetector.getInterval(), clock.millis() + TOP_VALIDATE_TIMEOUT_IN_MILLIS, latestTime, false, @@ -435,7 +432,7 @@ public void onResponse(SearchResponse response) { // we aren't decreasing yet, at this point we will start decreasing for the first time // if we are inside the below block } else if (newIntervalMinute >= MAX_INTERVAL_REC_LENGTH_IN_MINUTES && !decreasingInterval) { - IntervalTimeConfiguration givenInterval = (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval(); + IntervalTimeConfiguration givenInterval = (IntervalTimeConfiguration) anomalyDetector.getInterval(); this.detectorInterval = new IntervalTimeConfiguration( (long) Math .floor( @@ -523,7 +520,7 @@ private void processIntervalRecommendation(IntervalTimeConfiguration interval, l if (interval == null) { checkRawDataSparsity(latestTime); } else { - if (interval.equals(anomalyDetector.getDetectionInterval())) { + if (interval.equals(anomalyDetector.getInterval())) { logger.info("Using the current interval there is enough dense data "); // Check if there is a window delay recommendation if everything else is successful and send exception if (Instant.now().toEpochMilli() - latestTime > timeConfigToMilliSec(anomalyDetector.getWindowDelay())) { @@ -561,10 +558,7 @@ private SearchSourceBuilder getSearchSourceBuilder(QueryBuilder query, Aggregati } private void checkRawDataSparsity(long latestTime) { - AggregationBuilder aggregation = getBucketAggregation( - latestTime, - (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval() - ); + AggregationBuilder aggregation = getBucketAggregation(latestTime, (IntervalTimeConfiguration) anomalyDetector.getInterval()); SearchSourceBuilder searchSourceBuilder = new 
SearchSourceBuilder().aggregation(aggregation).size(0).timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); final ActionListener searchResponseListener = ActionListener @@ -623,10 +617,7 @@ private void processRawDataResults(SearchResponse response, long latestTime) { } private void checkDataFilterSparsity(long latestTime) { - AggregationBuilder aggregation = getBucketAggregation( - latestTime, - (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval() - ); + AggregationBuilder aggregation = getBucketAggregation(latestTime, (IntervalTimeConfiguration) anomalyDetector.getInterval()); BoolQueryBuilder query = QueryBuilders.boolQuery().filter(anomalyDetector.getFilterQuery()); SearchSourceBuilder searchSourceBuilder = getSearchSourceBuilder(query, aggregation); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); @@ -662,7 +653,7 @@ private void processDataFilterResults(SearchResponse response, long latestTime) // blocks below are executed if data is dense enough with filter query applied. 
// If HCAD then category fields will be added to bucket aggregation to see if they // are the root cause of the issues and if not the feature queries will be checked for sparsity - } else if (anomalyDetector.isMultientityDetector()) { + } else if (anomalyDetector.isHighCardinality()) { getTopEntityForCategoryField(latestTime); } else { try { @@ -689,10 +680,7 @@ private void checkCategoryFieldSparsity(Map topEntity, long late for (Map.Entry entry : topEntity.entrySet()) { query.filter(QueryBuilders.termQuery(entry.getKey(), entry.getValue())); } - AggregationBuilder aggregation = getBucketAggregation( - latestTime, - (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval() - ); + AggregationBuilder aggregation = getBucketAggregation(latestTime, (IntervalTimeConfiguration) anomalyDetector.getInterval()); SearchSourceBuilder searchSourceBuilder = getSearchSourceBuilder(query, aggregation); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); final ActionListener searchResponseListener = ActionListener @@ -754,10 +742,7 @@ private void checkFeatureQueryDelegate(long latestTime) throws IOException { ); for (Feature feature : anomalyDetector.getFeatureAttributes()) { - AggregationBuilder aggregation = getBucketAggregation( - latestTime, - (IntervalTimeConfiguration) anomalyDetector.getDetectionInterval() - ); + AggregationBuilder aggregation = getBucketAggregation(latestTime, (IntervalTimeConfiguration) anomalyDetector.getInterval()); BoolQueryBuilder query = QueryBuilders.boolQuery().filter(anomalyDetector.getFilterQuery()); List featureFields = ParseUtils.getFieldNamesForFeature(feature, xContentRegistry); for (String featureField : featureFields) { @@ -841,7 +826,7 @@ private LongBounds getTimeRangeBounds(long endMillis, IntervalTimeConfiguration } private int getNumberOfSamples() { - long interval = anomalyDetector.getDetectorIntervalInMilliseconds(); + long interval = 
anomalyDetector.getIntervalInMilliseconds(); return Math .max( (int) (Duration.ofHours(AnomalyDetectorSettings.TRAIN_SAMPLE_TIME_RANGE_IN_HOURS).toMillis() / interval), diff --git a/src/main/java/org/opensearch/ad/task/ADBatchTaskCache.java b/src/main/java/org/opensearch/ad/task/ADBatchTaskCache.java index 5e996c013..8f526375a 100644 --- a/src/main/java/org/opensearch/ad/task/ADBatchTaskCache.java +++ b/src/main/java/org/opensearch/ad/task/ADBatchTaskCache.java @@ -55,7 +55,7 @@ public class ADBatchTaskCache { private Entity entity; protected ADBatchTaskCache(ADTask adTask) { - this.detectorId = adTask.getDetectorId(); + this.detectorId = adTask.getId(); this.taskId = adTask.getTaskId(); this.detectorTaskId = adTask.getDetectorLevelTaskId(); this.entity = adTask.getEntity(); @@ -85,7 +85,7 @@ protected ADBatchTaskCache(ADTask adTask) { this.thresholdModelTrained = false; } - protected String getDetectorId() { + protected String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java b/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java index 9902c0a7e..dd1987b2e 100644 --- a/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java +++ b/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java @@ -219,8 +219,8 @@ public ADBatchTaskRunner( * @param listener action listener */ public void run(ADTask adTask, TransportService transportService, ActionListener listener) { - boolean isHCDetector = adTask.getDetector().isMultientityDetector(); - if (isHCDetector && !adTaskCacheManager.topEntityInited(adTask.getDetectorId())) { + boolean isHCDetector = adTask.getDetector().isHighCardinality(); + if (isHCDetector && !adTaskCacheManager.topEntityInited(adTask.getId())) { // Initialize top entities for HC detector threadPool.executor(AD_BATCH_TASK_THREAD_POOL_NAME).execute(() -> { ActionListener hcDelegatedListener = getInternalHCDelegatedListener(adTask); @@ -262,7 +262,7 @@ private ActionListener 
getTopEntitiesListener( ActionListener listener ) { String taskId = adTask.getTaskId(); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); ActionListener actionListener = ActionListener.wrap(response -> { adTaskCacheManager.setTopEntityInited(detectorId); int totalEntities = adTaskCacheManager.getPendingEntityCount(detectorId); @@ -325,11 +325,11 @@ public void getTopEntities(ADTask adTask, ActionListener internalHCListe getDateRangeOfSourceData(adTask, (dataStartTime, dataEndTime) -> { PriorityTracker priorityTracker = new PriorityTracker( Clock.systemUTC(), - adTask.getDetector().getDetectorIntervalInSeconds(), + adTask.getDetector().getIntervalInSeconds(), adTask.getDetectionDateRange().getStartTime().toEpochMilli(), MAX_TOP_ENTITIES_LIMIT_FOR_HISTORICAL_ANALYSIS ); - long detectorInterval = adTask.getDetector().getDetectorIntervalInMilliseconds(); + long detectorInterval = adTask.getDetector().getIntervalInMilliseconds(); logger .debug( "start to search top entities at {}, data start time: {}, data end time: {}, interval: {}", @@ -338,7 +338,7 @@ public void getTopEntities(ADTask adTask, ActionListener internalHCListe dataEndTime, detectorInterval ); - if (adTask.getDetector().isMultiCategoryDetector()) { + if (adTask.getDetector().hasMultipleCategories()) { searchTopEntitiesForMultiCategoryHC( adTask, priorityTracker, @@ -390,19 +390,19 @@ private void searchTopEntitiesForMultiCategoryHC( logger.debug("finish searching top entities at " + System.currentTimeMillis()); List topNEntities = priorityTracker.getTopNEntities(maxTopEntitiesPerHcDetector); if (topNEntities.size() == 0) { - logger.error("There is no entity found for detector " + adTask.getDetectorId()); - internalHCListener.onFailure(new ResourceNotFoundException(adTask.getDetectorId(), "No entity found")); + logger.error("There is no entity found for detector " + adTask.getId()); + internalHCListener.onFailure(new ResourceNotFoundException(adTask.getId(), "No entity 
found")); return; } - adTaskCacheManager.addPendingEntities(adTask.getDetectorId(), topNEntities); - adTaskCacheManager.setTopEntityCount(adTask.getDetectorId(), topNEntities.size()); + adTaskCacheManager.addPendingEntities(adTask.getId(), topNEntities); + adTaskCacheManager.setTopEntityCount(adTask.getId(), topNEntities.size()); internalHCListener.onResponse("Get top entities done"); } }, e -> { - logger.error("Failed to get top entities for detector " + adTask.getDetectorId(), e); + logger.error("Failed to get top entities for detector " + adTask.getId(), e); internalHCListener.onFailure(e); }); - int minimumDocCount = Math.max((int) (bucketInterval / adTask.getDetector().getDetectorIntervalInMilliseconds()) / 2, 1); + int minimumDocCount = Math.max((int) (bucketInterval / adTask.getDetector().getIntervalInMilliseconds()) / 2, 1); searchFeatureDao .getHighestCountEntities( adTask.getDetector(), @@ -437,7 +437,7 @@ private void searchTopEntitiesForSingleCategoryHC( String topEntitiesAgg = "topEntities"; AggregationBuilder aggregation = new TermsAggregationBuilder(topEntitiesAgg) - .field(adTask.getDetector().getCategoryField().get(0)) + .field(adTask.getDetector().getCategoryFields().get(0)) .size(MAX_TOP_ENTITIES_LIMIT_FOR_HISTORICAL_ANALYSIS); sourceBuilder.aggregation(aggregation).size(0); SearchRequest searchRequest = new SearchRequest(); @@ -467,16 +467,16 @@ private void searchTopEntitiesForSingleCategoryHC( logger.debug("finish searching top entities at " + System.currentTimeMillis()); List topNEntities = priorityTracker.getTopNEntities(maxTopEntitiesPerHcDetector); if (topNEntities.size() == 0) { - logger.error("There is no entity found for detector " + adTask.getDetectorId()); - internalHCListener.onFailure(new ResourceNotFoundException(adTask.getDetectorId(), "No entity found")); + logger.error("There is no entity found for detector " + adTask.getId()); + internalHCListener.onFailure(new ResourceNotFoundException(adTask.getId(), "No entity found")); 
return; } - adTaskCacheManager.addPendingEntities(adTask.getDetectorId(), topNEntities); - adTaskCacheManager.setTopEntityCount(adTask.getDetectorId(), topNEntities.size()); + adTaskCacheManager.addPendingEntities(adTask.getId(), topNEntities); + adTaskCacheManager.setTopEntityCount(adTask.getId(), topNEntities.size()); internalHCListener.onResponse("Get top entities done"); } }, e -> { - logger.error("Failed to get top entities for detector " + adTask.getDetectorId(), e); + logger.error("Failed to get top entities for detector " + adTask.getId(), e); internalHCListener.onFailure(e); }); // using the original context in listener as user roles have no permissions for internal operations like fetching a @@ -511,9 +511,9 @@ public void forwardOrExecuteADTask( ) { try { checkIfADTaskCancelledAndCleanupCache(adTask); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); AnomalyDetector detector = adTask.getDetector(); - boolean isHCDetector = detector.isMultientityDetector(); + boolean isHCDetector = detector.isHighCardinality(); if (isHCDetector) { String entityString = adTaskCacheManager.pollEntity(detectorId); logger.debug("Start to run entity: {} of detector {}", entityString, detectorId); @@ -560,7 +560,7 @@ public void forwardOrExecuteADTask( logger.info("Create entity task for entity:{}", entityString); Instant now = Instant.now(); ADTask adEntityTask = new ADTask.Builder() - .detectorId(adTask.getDetectorId()) + .detectorId(adTask.getId()) .detector(detector) .isLatest(true) .taskType(ADTaskType.HISTORICAL_HC_ENTITY.name()) @@ -638,7 +638,7 @@ private ActionListener workerNodeResponseListener( if (adTask.isEntityTask()) { // When reach this line, the entity task already been put into worker node's cache. // Then it's safe to move entity from temp entities queue to running entities queue. 
- adTaskCacheManager.moveToRunningEntity(adTask.getDetectorId(), adTaskManager.convertEntityToString(adTask)); + adTaskCacheManager.moveToRunningEntity(adTask.getId(), adTaskManager.convertEntityToString(adTask)); } startNewEntityTaskLane(adTask, transportService); }, e -> { @@ -646,10 +646,10 @@ private ActionListener workerNodeResponseListener( listener.onFailure(e); handleException(adTask, e); - if (adTask.getDetector().isMultientityDetector()) { + if (adTask.getDetector().isHighCardinality()) { // Entity task done on worker node. Send entity task done message to coordinating node to poll next entity. adTaskManager.entityTaskDone(adTask, e, transportService); - if (adTaskCacheManager.getAvailableNewEntityTaskLanes(adTask.getDetectorId()) > 0) { + if (adTaskCacheManager.getAvailableNewEntityTaskLanes(adTask.getId()) > 0) { // When reach this line, it means entity task failed to start on worker node // Sleep some time before starting new task lane. threadPool @@ -698,8 +698,8 @@ private void forwardOrExecuteEntityTask( // start new entity task lane private synchronized void startNewEntityTaskLane(ADTask adTask, TransportService transportService) { - if (adTask.getDetector().isMultientityDetector() && adTaskCacheManager.getAndDecreaseEntityTaskLanes(adTask.getDetectorId()) > 0) { - logger.debug("start new task lane for detector {}", adTask.getDetectorId()); + if (adTask.getDetector().isHighCardinality() && adTaskCacheManager.getAndDecreaseEntityTaskLanes(adTask.getId()) > 0) { + logger.debug("start new task lane for detector {}", adTask.getId()); forwardOrExecuteADTask(adTask, transportService, getInternalHCDelegatedListener(adTask)); } } @@ -721,10 +721,10 @@ private void dispatchTask(ADTask adTask, ActionListener listener) .append(DEFAULT_JVM_HEAP_USAGE_THRESHOLD) .append("%. 
") .append(NO_ELIGIBLE_NODE_TO_RUN_DETECTOR) - .append(adTask.getDetectorId()); + .append(adTask.getId()); String errorMessage = errorMessageBuilder.toString(); logger.warn(errorMessage + ", task id " + adTask.getTaskId() + ", " + adTask.getTaskType()); - listener.onFailure(new LimitExceededException(adTask.getDetectorId(), errorMessage)); + listener.onFailure(new LimitExceededException(adTask.getId(), errorMessage)); return; } candidateNodeResponse = candidateNodeResponse @@ -734,10 +734,10 @@ private void dispatchTask(ADTask adTask, ActionListener listener) if (candidateNodeResponse.size() == 0) { StringBuilder errorMessageBuilder = new StringBuilder("All nodes' executing batch tasks exceeds limitation ") .append(NO_ELIGIBLE_NODE_TO_RUN_DETECTOR) - .append(adTask.getDetectorId()); + .append(adTask.getId()); String errorMessage = errorMessageBuilder.toString(); logger.warn(errorMessage + ", task id " + adTask.getTaskId() + ", " + adTask.getTaskType()); - listener.onFailure(new LimitExceededException(adTask.getDetectorId(), errorMessage)); + listener.onFailure(new LimitExceededException(adTask.getId(), errorMessage)); return; } Optional targetNode = candidateNodeResponse @@ -798,12 +798,12 @@ public void startADBatchTaskOnWorkerNode( private ActionListener internalBatchTaskListener(ADTask adTask, TransportService transportService) { String taskId = adTask.getTaskId(); String detectorTaskId = adTask.getDetectorLevelTaskId(); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); ActionListener listener = ActionListener.wrap(response -> { // If batch task finished normally, remove task from cache and decrease executing task count by 1. 
adTaskCacheManager.remove(taskId, detectorId, detectorTaskId); adStats.getStat(AD_EXECUTING_BATCH_TASK_COUNT.getName()).decrement(); - if (!adTask.getDetector().isMultientityDetector()) { + if (!adTask.getDetector().isHighCardinality()) { // Set single-entity detector task as FINISHED here adTaskManager .cleanDetectorCache( @@ -820,7 +820,7 @@ private ActionListener internalBatchTaskListener(ADTask adTask, Transpor // If batch task failed, remove task from cache and decrease executing task count by 1. adTaskCacheManager.remove(taskId, detectorId, detectorTaskId); adStats.getStat(AD_EXECUTING_BATCH_TASK_COUNT.getName()).decrement(); - if (!adTask.getDetector().isMultientityDetector()) { + if (!adTask.getDetector().isHighCardinality()) { adTaskManager.cleanDetectorCache(adTask, transportService, () -> handleException(adTask, e)); } else { adTaskManager.entityTaskDone(adTask, e, transportService); @@ -865,7 +865,7 @@ private void executeADBatchTaskOnWorkerNode(ADTask adTask, ActionListener { - long interval = ((IntervalTimeConfiguration) adTask.getDetector().getDetectionInterval()) - .toDuration() - .toMillis(); + long interval = ((IntervalTimeConfiguration) adTask.getDetector().getInterval()).toDuration().toMillis(); long expectedPieceEndTime = dataStartTime + pieceSize * interval; long firstPieceEndTime = Math.min(expectedPieceEndTime, dataEndTime); logger @@ -922,7 +920,7 @@ private void runFirstPiece(ADTask adTask, Instant executeStartTime, ActionListen interval, dataStartTime, dataEndTime, - adTask.getDetectorId(), + adTask.getId(), adTask.getTaskId() ); getFeatureData( @@ -972,10 +970,10 @@ private void getDateRangeOfSourceData(ADTask adTask, BiConsumer cons double maxValue = maxAgg.getValue(); // If time field not exist or there is no value, will return infinity value if (minValue == Double.POSITIVE_INFINITY) { - internalListener.onFailure(new ResourceNotFoundException(adTask.getDetectorId(), "There is no data in the time field")); + 
internalListener.onFailure(new ResourceNotFoundException(adTask.getId(), "There is no data in the time field")); return; } - long interval = ((IntervalTimeConfiguration) adTask.getDetector().getDetectionInterval()).toDuration().toMillis(); + long interval = ((IntervalTimeConfiguration) adTask.getDetector().getInterval()).toDuration().toMillis(); DateRange detectionDateRange = adTask.getDetectionDateRange(); long dataStartTime = detectionDateRange.getStartTime().toEpochMilli(); @@ -984,8 +982,7 @@ private void getDateRangeOfSourceData(ADTask adTask, BiConsumer cons long maxDate = (long) maxValue; if (minDate >= dataEndTime || maxDate <= dataStartTime) { - internalListener - .onFailure(new ResourceNotFoundException(adTask.getDetectorId(), "There is no data in the detection date range")); + internalListener.onFailure(new ResourceNotFoundException(adTask.getId(), "There is no data in the detection date range")); return; } if (minDate > dataStartTime) { @@ -1100,7 +1097,7 @@ private void detectAnomaly( ? 
"No full shingle in current detection window" : "No data in current detection window"; AnomalyResult anomalyResult = new AnomalyResult( - adTask.getDetectorId(), + adTask.getId(), adTask.getDetectorLevelTaskId(), featureData, Instant.ofEpochMilli(intervalEndTime - interval), @@ -1126,8 +1123,8 @@ private void detectAnomaly( AnomalyResult anomalyResult = AnomalyResult .fromRawTRCFResult( - adTask.getDetectorId(), - adTask.getDetector().getDetectorIntervalInMilliseconds(), + adTask.getId(), + adTask.getDetector().getIntervalInMilliseconds(), adTask.getDetectorLevelTaskId(), score, descriptor.getAnomalyGrade(), @@ -1165,7 +1162,7 @@ private void detectAnomaly( user = adTask.getUser().getName(); roles = adTask.getUser().getRoles(); } - String resultIndex = adTask.getDetector().getResultIndex(); + String resultIndex = adTask.getDetector().getCustomResultIndex(); if (resultIndex == null) { // if result index is null, store anomaly result directly @@ -1248,7 +1245,7 @@ private void runNextPiece( ActionListener internalListener ) { String taskId = adTask.getTaskId(); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); String detectorTaskId = adTask.getDetectorLevelTaskId(); float initProgress = calculateInitProgress(taskId); String taskState = initProgress >= 1.0f ? 
ADTaskState.RUNNING.name() : ADTaskState.INIT.name(); @@ -1361,11 +1358,11 @@ private float calculateInitProgress(String taskId) { private void checkIfADTaskCancelledAndCleanupCache(ADTask adTask) { String taskId = adTask.getTaskId(); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); String detectorTaskId = adTask.getDetectorLevelTaskId(); // refresh latest HC task run time adTaskCacheManager.refreshLatestHCTaskRunTime(detectorId); - if (adTask.getDetector().isMultientityDetector() + if (adTask.getDetector().isHighCardinality() && adTaskCacheManager.isHCTaskCoordinatingNode(detectorId) && adTaskCacheManager.isHistoricalAnalysisCancelledForHC(detectorId, detectorTaskId)) { // clean up pending and running entity on coordinating node diff --git a/src/main/java/org/opensearch/ad/task/ADTaskCacheManager.java b/src/main/java/org/opensearch/ad/task/ADTaskCacheManager.java index c98367591..4ba79b58c 100644 --- a/src/main/java/org/opensearch/ad/task/ADTaskCacheManager.java +++ b/src/main/java/org/opensearch/ad/task/ADTaskCacheManager.java @@ -171,7 +171,7 @@ public ADTaskCacheManager(Settings settings, ClusterService clusterService, Memo */ public synchronized void add(ADTask adTask) { String taskId = adTask.getTaskId(); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); if (contains(taskId)) { throw new DuplicateTaskException(DETECTOR_IS_RUNNING); } @@ -303,7 +303,7 @@ public boolean contains(String taskId) { * @return true if there is task in cache; otherwise return false */ public boolean containsTaskOfDetector(String detectorId) { - return batchTaskCaches.values().stream().filter(v -> Objects.equals(detectorId, v.getDetectorId())).findAny().isPresent(); + return batchTaskCaches.values().stream().filter(v -> Objects.equals(detectorId, v.getId())).findAny().isPresent(); } /** @@ -316,7 +316,7 @@ public List getTasksOfDetector(String detectorId) { return batchTaskCaches .values() .stream() - .filter(v -> 
Objects.equals(detectorId, v.getDetectorId())) + .filter(v -> Objects.equals(detectorId, v.getId())) .map(c -> c.getTaskId()) .collect(Collectors.toList()); } @@ -339,7 +339,7 @@ private ADBatchTaskCache getBatchTaskCache(String taskId) { } private List getBatchTaskCacheByDetectorId(String detectorId) { - return batchTaskCaches.values().stream().filter(v -> Objects.equals(detectorId, v.getDetectorId())).collect(Collectors.toList()); + return batchTaskCaches.values().stream().filter(v -> Objects.equals(detectorId, v.getId())).collect(Collectors.toList()); } /** @@ -506,7 +506,7 @@ public ADTaskCancellationState cancelByDetectorId(String detectorId, String dete public boolean isCancelled(String taskId) { // For HC detector, ADBatchTaskCache is entity task. ADBatchTaskCache taskCache = getBatchTaskCache(taskId); - String detectorId = taskCache.getDetectorId(); + String detectorId = taskCache.getId(); String detectorTaskId = taskCache.getDetectorTaskId(); ADHCBatchTaskRunState taskStateCache = getHCBatchTaskRunState(detectorId, detectorTaskId); @@ -810,7 +810,7 @@ public boolean isHCTaskRunning(String detectorId) { Optional entityTask = this.batchTaskCaches .values() .stream() - .filter(cache -> Objects.equals(detectorId, cache.getDetectorId()) && cache.getEntity() != null) + .filter(cache -> Objects.equals(detectorId, cache.getId()) && cache.getEntity() != null) .findFirst(); return entityTask.isPresent(); } diff --git a/src/main/java/org/opensearch/ad/task/ADTaskManager.java b/src/main/java/org/opensearch/ad/task/ADTaskManager.java index f58ec29cc..5d6fd86b1 100644 --- a/src/main/java/org/opensearch/ad/task/ADTaskManager.java +++ b/src/main/java/org/opensearch/ad/task/ADTaskManager.java @@ -306,7 +306,7 @@ public void startDetector( listener.onFailure(new OpenSearchStatusException(errorMessage, RestStatus.BAD_REQUEST)); return; } - String resultIndex = detector.get().getResultIndex(); + String resultIndex = detector.get().getCustomResultIndex(); if (resultIndex == 
null) { startRealtimeOrHistoricalDetection(detectionDateRange, handler, user, transportService, listener, detector); return; @@ -421,7 +421,7 @@ public void startHistoricalAnalysis( TransportService transportService, ActionListener listener ) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); hashRing.buildAndGetOwningNodeWithSameLocalAdVersion(detectorId, owningNode -> { if (!owningNode.isPresent()) { logger.debug("Can't find eligible node to run as AD task's coordinating node"); @@ -561,7 +561,7 @@ public void checkTaskSlots( TransportService transportService, ActionListener listener ) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); logger.debug("Start checking task slots for detector: {}, task action: {}", detectorId, afterCheckAction); if (!checkingTaskSlot.tryAcquire()) { logger.info("Can't acquire checking task slot semaphore for detector {}", detectorId); @@ -631,9 +631,7 @@ public void checkTaskSlots( // then we will assign 4 tasks slots to this HC detector (4 is less than 8). The data index // only has 2 entities. So we assign 2 more task slots than actual need. But it's ok as we // will auto tune task slot when historical analysis task starts. - int approvedTaskSlots = detector.isMultientityDetector() - ? Math.min(maxRunningEntitiesPerDetector, availableAdTaskSlots) - : 1; + int approvedTaskSlots = detector.isHighCardinality() ? 
Math.min(maxRunningEntitiesPerDetector, availableAdTaskSlots) : 1; forwardToCoordinatingNode( adTask, detector, @@ -663,12 +661,7 @@ private void forwardToCoordinatingNode( ) { switch (targetActionOfTaskSlotChecking) { case START: - logger - .info( - "Will assign {} task slots to run historical analysis for detector {}", - approvedTaskSlots, - detector.getDetectorId() - ); + logger.info("Will assign {} task slots to run historical analysis for detector {}", approvedTaskSlots, detector.getId()); startHistoricalAnalysis(detector, detectionDateRange, user, approvedTaskSlots, transportService, wrappedActionListener); break; case SCALE_ENTITY_TASK_SLOTS: @@ -676,7 +669,7 @@ private void forwardToCoordinatingNode( .info( "There are {} task slots available now to scale historical analysis task lane for detector {}", approvedTaskSlots, - adTask.getDetectorId() + adTask.getId() ); scaleTaskLaneOnCoordinatingNode(adTask, approvedTaskSlots, transportService, wrappedActionListener); break; @@ -714,7 +707,7 @@ private DiscoveryNode getCoordinatingNode(ADTask adTask) { } } if (targetNode == null) { - throw new ResourceNotFoundException(adTask.getDetectorId(), "AD task coordinating node not found"); + throw new ResourceNotFoundException(adTask.getId(), "AD task coordinating node not found"); } return targetNode; } @@ -746,7 +739,7 @@ public void startDetector( try { if (detectionIndices.doesDetectorStateIndexExist()) { // If detection index exist, check if latest AD task is running - getAndExecuteOnLatestDetectorLevelTask(detector.getDetectorId(), getADTaskTypes(detectionDateRange), (adTask) -> { + getAndExecuteOnLatestDetectorLevelTask(detector.getId(), getADTaskTypes(detectionDateRange), (adTask) -> { if (!adTask.isPresent() || adTask.get().isDone()) { updateLatestFlagOfOldTasksAndCreateNewTask(detector, detectionDateRange, user, listener); } else { @@ -774,16 +767,16 @@ public void startDetector( })); } } catch (Exception e) { - logger.error("Failed to start detector " + 
detector.getDetectorId(), e); + logger.error("Failed to start detector " + detector.getId(), e); listener.onFailure(e); } } private ADTaskType getADTaskType(AnomalyDetector detector, DateRange detectionDateRange) { if (detectionDateRange == null) { - return detector.isMultientityDetector() ? ADTaskType.REALTIME_HC_DETECTOR : ADTaskType.REALTIME_SINGLE_ENTITY; + return detector.isHighCardinality() ? ADTaskType.REALTIME_HC_DETECTOR : ADTaskType.REALTIME_SINGLE_ENTITY; } else { - return detector.isMultientityDetector() ? ADTaskType.HISTORICAL_HC_DETECTOR : ADTaskType.HISTORICAL_SINGLE_ENTITY; + return detector.isHighCardinality() ? ADTaskType.HISTORICAL_HC_DETECTOR : ADTaskType.HISTORICAL_SINGLE_ENTITY; } } @@ -1093,7 +1086,7 @@ private void resetRealtimeDetectorTaskState( return; } ADTask adTask = runningRealtimeTasks.get(0); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); GetRequest getJobRequest = new GetRequest(CommonName.JOB_INDEX).id(detectorId); client.get(getJobRequest, ActionListener.wrap(r -> { if (r.isExists()) { @@ -1146,11 +1139,11 @@ private void resetHistoricalDetectorTaskState( if (taskStopped) { logger.debug("Reset task state as stopped, task id: {}", adTask.getTaskId()); if (taskProfile.getTaskId() == null // This means coordinating node doesn't have HC detector cache - && detector.isMultientityDetector() + && detector.isHighCardinality() && !isNullOrEmpty(taskProfile.getEntityTaskProfiles())) { // If coordinating node restarted, HC detector cache on it will be gone. But worker node still // runs entity tasks, we'd better stop these entity tasks to clean up resource earlier. 
- stopHistoricalAnalysis(adTask.getDetectorId(), Optional.of(adTask), null, ActionListener.wrap(r -> { + stopHistoricalAnalysis(adTask.getId(), Optional.of(adTask), null, ActionListener.wrap(r -> { logger.debug("Restop detector successfully"); resetTaskStateAsStopped(adTask, function, transportService, listener); }, e -> { @@ -1199,17 +1192,17 @@ private void resetHistoricalDetectorTaskState( } private boolean isTaskStopped(String taskId, AnomalyDetector detector, ADTaskProfile taskProfile) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); if (taskProfile == null || !Objects.equals(taskId, taskProfile.getTaskId())) { logger.debug("AD task not found for task {} detector {}", taskId, detectorId); // If no node is running this task, reset it as STOPPED. return true; } - if (!detector.isMultientityDetector() && taskProfile.getNodeId() == null) { + if (!detector.isHighCardinality() && taskProfile.getNodeId() == null) { logger.debug("AD task not running for single entity detector {}, task {}", detectorId, taskId); return true; } - if (detector.isMultientityDetector() + if (detector.isHighCardinality() && taskProfile.getTotalEntitiesInited() && isNullOrEmpty(taskProfile.getRunningEntities()) && isNullOrEmpty(taskProfile.getEntityTaskProfiles()) @@ -1336,7 +1329,7 @@ public void cleanDetectorCache( ActionListener listener ) { String coordinatingNode = adTask.getCoordinatingNode(); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); String taskId = adTask.getTaskId(); try { forwardADTaskToCoordinatingNode( @@ -1364,7 +1357,7 @@ public void cleanDetectorCache( } protected void cleanDetectorCache(ADTask adTask, TransportService transportService, AnomalyDetectorFunction function) { - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); String taskId = adTask.getTaskId(); cleanDetectorCache( adTask, @@ -1418,7 +1411,7 @@ public void getLatestHistoricalTaskProfile( * @param 
listener action listener */ private void getADTaskProfile(ADTask adDetectorLevelTask, ActionListener listener) { - String detectorId = adDetectorLevelTask.getDetectorId(); + String detectorId = adDetectorLevelTask.getId(); hashRing.getAllEligibleDataNodesWithKnownAdVersion(dataNodes -> { ADTaskProfileRequest adTaskProfileRequest = new ADTaskProfileRequest(detectorId, dataNodes); @@ -1486,7 +1479,7 @@ private void updateLatestFlagOfOldTasksAndCreateNewTask( UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(); updateByQueryRequest.indices(DETECTION_STATE_INDEX); BoolQueryBuilder query = new BoolQueryBuilder(); - query.filter(new TermQueryBuilder(DETECTOR_ID_FIELD, detector.getDetectorId())); + query.filter(new TermQueryBuilder(DETECTOR_ID_FIELD, detector.getId())); query.filter(new TermQueryBuilder(IS_LATEST_FIELD, true)); // make sure we reset all latest task as false when user switch from single entity to HC, vice versa. query.filter(new TermsQueryBuilder(TASK_TYPE_FIELD, taskTypeToString(getADTaskTypes(detectionDateRange, true)))); @@ -1506,11 +1499,11 @@ private void updateLatestFlagOfOldTasksAndCreateNewTask( String coordinatingNode = detectionDateRange == null ? 
null : clusterService.localNode().getId(); createNewADTask(detector, detectionDateRange, user, coordinatingNode, listener); } else { - logger.error("Failed to update old task's state for detector: {}, response: {} ", detector.getDetectorId(), r.toString()); + logger.error("Failed to update old task's state for detector: {}, response: {} ", detector.getId(), r.toString()); listener.onFailure(bulkFailures.get(0).getCause()); } }, e -> { - logger.error("Failed to reset old tasks as not latest for detector " + detector.getDetectorId(), e); + logger.error("Failed to reset old tasks as not latest for detector " + detector.getId(), e); listener.onFailure(e); })); } @@ -1526,7 +1519,7 @@ private void createNewADTask( Instant now = Instant.now(); String taskType = getADTaskType(detector, detectionDateRange).name(); ADTask adTask = new ADTask.Builder() - .detectorId(detector.getDetectorId()) + .detectorId(detector.getId()) .detector(detector) .isLatest(true) .taskType(taskType) @@ -1569,11 +1562,11 @@ public void createADTaskDirectly(ADTask adTask, Consumer func .source(adTask.toXContent(builder, RestHandlerUtils.XCONTENT_WITH_TYPE)) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); client.index(request, ActionListener.wrap(r -> function.accept(r), e -> { - logger.error("Failed to create AD task for detector " + adTask.getDetectorId(), e); + logger.error("Failed to create AD task for detector " + adTask.getId(), e); listener.onFailure(e); })); } catch (Exception e) { - logger.error("Failed to create AD task for detector " + adTask.getDetectorId(), e); + logger.error("Failed to create AD task for detector " + adTask.getId(), e); listener.onFailure(e); } } @@ -1600,7 +1593,7 @@ private void onIndexADTaskResponse( // ADTaskManager#initRealtimeTaskCacheAndCleanupStaleCache for details. Here the // realtime task cache not inited yet when create AD task, so no need to cleanup. 
if (adTask.isHistoricalTask()) { - adTaskCacheManager.removeHistoricalTaskCache(adTask.getDetectorId()); + adTaskCacheManager.removeHistoricalTaskCache(adTask.getId()); } listener.onFailure(e); } @@ -1610,7 +1603,7 @@ private void onIndexADTaskResponse( // DuplicateTaskException. This is to solve race condition when user send // multiple start request for one historical detector. if (adTask.isHistoricalTask()) { - adTaskCacheManager.add(adTask.getDetectorId(), adTask); + adTaskCacheManager.add(adTask.getId(), adTask); } } catch (Exception e) { delegatedListener.onFailure(e); @@ -1623,7 +1616,7 @@ private void onIndexADTaskResponse( private void cleanOldAdTaskDocs(IndexResponse response, ADTask adTask, ActionListener delegatedListener) { BoolQueryBuilder query = new BoolQueryBuilder(); - query.filter(new TermQueryBuilder(DETECTOR_ID_FIELD, adTask.getDetectorId())); + query.filter(new TermQueryBuilder(DETECTOR_ID_FIELD, adTask.getId())); query.filter(new TermQueryBuilder(IS_LATEST_FIELD, false)); if (adTask.isHistoricalTask()) { @@ -1644,7 +1637,7 @@ private void cleanOldAdTaskDocs(IndexResponse response, ADTask adTask, ActionLis .from(maxOldAdTaskDocsPerDetector) .size(MAX_OLD_AD_TASK_DOCS); searchRequest.source(sourceBuilder).indices(DETECTION_STATE_INDEX); - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); deleteTaskDocs(detectorId, searchRequest, () -> { if (adTask.isHistoricalTask()) { @@ -1679,7 +1672,7 @@ protected void deleteTaskDocs( try (XContentParser parser = createXContentParserFromRegistry(xContentRegistry, searchHit.getSourceRef())) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ADTask adTask = ADTask.parse(parser, searchHit.getId()); - logger.debug("Delete old task: {} of detector: {}", adTask.getTaskId(), adTask.getDetectorId()); + logger.debug("Delete old task: {} of detector: {}", adTask.getTaskId(), adTask.getId()); bulkRequest.add(new 
DeleteRequest(DETECTION_STATE_INDEX).id(adTask.getTaskId())); } catch (Exception e) { listener.onFailure(e); @@ -1753,7 +1746,7 @@ private void runBatchResultAction(IndexResponse response, ADTask adTask, ActionL .info( "AD task {} of detector {} dispatched to {} node {}", adTask.getTaskId(), - adTask.getDetectorId(), + adTask.getId(), remoteOrLocal, r.getNodeId() ); @@ -1785,7 +1778,7 @@ public void handleADTaskException(ADTask adTask, Exception e) { logger .warn( "There is already one running task for detector, detectorId:" - + adTask.getDetectorId() + + adTask.getId() + ". Will delete task " + adTask.getTaskId() ); @@ -1793,14 +1786,14 @@ public void handleADTaskException(ADTask adTask, Exception e) { return; } if (e instanceof TaskCancelledException) { - logger.info("AD task cancelled, taskId: {}, detectorId: {}", adTask.getTaskId(), adTask.getDetectorId()); + logger.info("AD task cancelled, taskId: {}, detectorId: {}", adTask.getTaskId(), adTask.getId()); state = ADTaskState.STOPPED.name(); String stoppedBy = ((TaskCancelledException) e).getCancelledBy(); if (stoppedBy != null) { updatedFields.put(STOPPED_BY_FIELD, stoppedBy); } } else { - logger.error("Failed to execute AD batch task, task id: " + adTask.getTaskId() + ", detector id: " + adTask.getDetectorId(), e); + logger.error("Failed to execute AD batch task, task id: " + adTask.getTaskId() + ", detector id: " + adTask.getId(), e); } updatedFields.put(ERROR_FIELD, getErrorMessage(e)); updatedFields.put(STATE_FIELD, state); @@ -2126,7 +2119,7 @@ public void initRealtimeTaskCacheAndCleanupStaleCache( clusterService.localNode().getId(), ActionListener.wrap(r -> { logger.info("Recreate realtime task successfully for detector {}", detectorId); - adTaskCacheManager.initRealtimeTaskCache(detectorId, detector.getDetectorIntervalInMilliseconds()); + adTaskCacheManager.initRealtimeTaskCache(detectorId, detector.getIntervalInMilliseconds()); listener.onResponse(true); }, e -> { logger.error("Failed to recreate 
realtime task for detector " + detectorId, e); @@ -2155,12 +2148,12 @@ public void initRealtimeTaskCacheAndCleanupStaleCache( oldCoordinatingNode, detectorId ); - adTaskCacheManager.initRealtimeTaskCache(detectorId, detector.getDetectorIntervalInMilliseconds()); + adTaskCacheManager.initRealtimeTaskCache(detectorId, detector.getIntervalInMilliseconds()); listener.onResponse(true); }, listener); } else { logger.info("Init realtime task cache for detector {}", detectorId); - adTaskCacheManager.initRealtimeTaskCache(detectorId, detector.getDetectorIntervalInMilliseconds()); + adTaskCacheManager.initRealtimeTaskCache(detectorId, detector.getIntervalInMilliseconds()); listener.onResponse(true); } }, transportService, false, listener); @@ -2297,7 +2290,7 @@ public boolean isRetryableError(String error) { * @param listener action listener */ public void setHCDetectorTaskDone(ADTask adTask, ADTaskState state, ActionListener listener) { - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); String taskId = adTask.isEntityTask() ? adTask.getParentTaskId() : adTask.getTaskId(); String detectorTaskId = adTask.getDetectorLevelTaskId(); @@ -2318,7 +2311,7 @@ public void setHCDetectorTaskDone(ADTask adTask, ADTaskState state, ActionListen long timeoutInMillis = 2000;// wait for 2 seconds to acquire updating HC detector task semaphore if (state == ADTaskState.FINISHED) { this.countEntityTasksByState(detectorTaskId, ImmutableList.of(ADTaskState.FINISHED), ActionListener.wrap(r -> { - logger.info("number of finished entity tasks: {}, for detector {}", r, adTask.getDetectorId()); + logger.info("number of finished entity tasks: {}, for detector {}", r, adTask.getId()); // Set task as FAILED if no finished entity task; otherwise set as FINISHED ADTaskState hcDetectorTaskState = r == 0 ? 
ADTaskState.FAILED : ADTaskState.FINISHED; // execute in AD batch task thread pool in case waiting for semaphore waste any shared OpenSearch thread pool @@ -2503,7 +2496,7 @@ public void runNextEntityForHCADHistorical( TransportService transportService, ActionListener listener ) { - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); int scaleDelta = scaleTaskSlots( adTask, transportService, @@ -2554,7 +2547,7 @@ protected int scaleTaskSlots( TransportService transportService, ActionListener scaleUpListener ) { - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); if (!scaleEntityTaskLane.tryAcquire()) { logger.debug("Can't get scaleEntityTaskLane semaphore"); return 0; @@ -2799,7 +2792,7 @@ public synchronized void removeStaleRunningEntity( TransportService transportService, ActionListener listener ) { - String detectorId = adTask.getDetectorId(); + String detectorId = adTask.getId(); boolean removed = adTaskCacheManager.removeRunningEntity(detectorId, entity); if (removed && adTaskCacheManager.getPendingEntityCount(detectorId) > 0) { logger.debug("kick off next pending entities"); @@ -2841,7 +2834,7 @@ public String convertEntityToString(ADTask adTask) { * @return entity string value */ public String convertEntityToString(Entity entity, AnomalyDetector detector) { - if (detector.isMultiCategoryDetector()) { + if (detector.hasMultipleCategories()) { try { XContentBuilder builder = entity.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS); return BytesReference.bytes(builder).utf8ToString(); @@ -2851,8 +2844,8 @@ public String convertEntityToString(Entity entity, AnomalyDetector detector) { throw new TimeSeriesException(error); } } - if (detector.isMultientityDetector()) { - String categoryField = detector.getCategoryField().get(0); + if (detector.isHighCardinality()) { + String categoryField = detector.getCategoryFields().get(0); return entity.getAttributes().get(categoryField); } return 
null; @@ -2866,7 +2859,7 @@ public String convertEntityToString(Entity entity, AnomalyDetector detector) { */ public Entity parseEntityFromString(String entityValue, ADTask adTask) { AnomalyDetector detector = adTask.getDetector(); - if (detector.isMultiCategoryDetector()) { + if (detector.hasMultipleCategories()) { try { XContentParser parser = XContentType.JSON .xContent() @@ -2878,8 +2871,8 @@ public Entity parseEntityFromString(String entityValue, ADTask adTask) { logger.debug(error, e); throw new TimeSeriesException(error); } - } else if (detector.isMultientityDetector()) { - return Entity.createSingleAttributeEntity(detector.getCategoryField().get(0), entityValue); + } else if (detector.isHighCardinality()) { + return Entity.createSingleAttributeEntity(detector.getCategoryFields().get(0), entityValue); } throw new IllegalArgumentException("Fail to parse to Entity for single flow detector"); } @@ -3060,12 +3053,7 @@ private void maintainRunningHistoricalTask(ConcurrentLinkedQueue taskQue ActionListener .wrap( r -> { - logger - .debug( - "Reset historical task state done for task {}, detector {}", - adTask.getTaskId(), - adTask.getDetectorId() - ); + logger.debug("Reset historical task state done for task {}, detector {}", adTask.getTaskId(), adTask.getId()); }, e -> { logger.error("Failed to reset historical task state for task " + adTask.getTaskId(), e); } ) diff --git a/src/main/java/org/opensearch/ad/transport/ADCancelTaskNodeRequest.java b/src/main/java/org/opensearch/ad/transport/ADCancelTaskNodeRequest.java index 70cc45a26..ec71120bb 100644 --- a/src/main/java/org/opensearch/ad/transport/ADCancelTaskNodeRequest.java +++ b/src/main/java/org/opensearch/ad/transport/ADCancelTaskNodeRequest.java @@ -34,7 +34,7 @@ public ADCancelTaskNodeRequest(StreamInput in) throws IOException { } public ADCancelTaskNodeRequest(ADCancelTaskRequest request) { - this.detectorId = request.getDetectorId(); + this.detectorId = request.getId(); this.detectorTaskId = 
request.getDetectorTaskId(); this.userName = request.getUserName(); this.reason = request.getReason(); @@ -49,7 +49,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(reason); } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/ADCancelTaskRequest.java b/src/main/java/org/opensearch/ad/transport/ADCancelTaskRequest.java index f289e6fc1..9b07add33 100644 --- a/src/main/java/org/opensearch/ad/transport/ADCancelTaskRequest.java +++ b/src/main/java/org/opensearch/ad/transport/ADCancelTaskRequest.java @@ -70,7 +70,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(reason); } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/ADCancelTaskTransportAction.java b/src/main/java/org/opensearch/ad/transport/ADCancelTaskTransportAction.java index f64811320..801910f96 100644 --- a/src/main/java/org/opensearch/ad/transport/ADCancelTaskTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ADCancelTaskTransportAction.java @@ -79,11 +79,11 @@ protected ADCancelTaskNodeResponse newNodeResponse(StreamInput in) throws IOExce @Override protected ADCancelTaskNodeResponse nodeOperation(ADCancelTaskNodeRequest request) { String userName = request.getUserName(); - String detectorId = request.getDetectorId(); + String detectorId = request.getId(); String detectorTaskId = request.getDetectorTaskId(); String reason = Optional.ofNullable(request.getReason()).orElse(HISTORICAL_ANALYSIS_CANCELLED); ADTaskCancellationState state = adTaskManager.cancelLocalTaskByDetectorId(detectorId, detectorTaskId, reason, userName); - logger.debug("Cancelled AD task for detector: {}", request.getDetectorId()); + logger.debug("Cancelled AD task for detector: {}", request.getId()); return new ADCancelTaskNodeResponse(clusterService.localNode(), state); } } 
diff --git a/src/main/java/org/opensearch/ad/transport/ADResultBulkTransportAction.java b/src/main/java/org/opensearch/ad/transport/ADResultBulkTransportAction.java index c981f6fcc..279c3d45b 100644 --- a/src/main/java/org/opensearch/ad/transport/ADResultBulkTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ADResultBulkTransportAction.java @@ -94,7 +94,7 @@ protected void doExecute(Task task, ADResultBulkRequest request, ActionListener< if (indexingPressurePercent <= softLimit) { for (ResultWriteRequest resultWriteRequest : results) { - addResult(bulkRequest, resultWriteRequest.getResult(), resultWriteRequest.getResultIndex()); + addResult(bulkRequest, resultWriteRequest.getResult(), resultWriteRequest.getCustomResultIndex()); } } else if (indexingPressurePercent <= hardLimit) { // exceed soft limit (60%) but smaller than hard limit (90%) @@ -102,7 +102,7 @@ protected void doExecute(Task task, ADResultBulkRequest request, ActionListener< for (ResultWriteRequest resultWriteRequest : results) { AnomalyResult result = resultWriteRequest.getResult(); if (result.isHighPriority() || random.nextFloat() < acceptProbability) { - addResult(bulkRequest, result, resultWriteRequest.getResultIndex()); + addResult(bulkRequest, result, resultWriteRequest.getCustomResultIndex()); } } } else { @@ -110,7 +110,7 @@ protected void doExecute(Task task, ADResultBulkRequest request, ActionListener< for (ResultWriteRequest resultWriteRequest : results) { AnomalyResult result = resultWriteRequest.getResult(); if (result.isHighPriority()) { - addResult(bulkRequest, result, resultWriteRequest.getResultIndex()); + addResult(bulkRequest, result, resultWriteRequest.getCustomResultIndex()); } } } diff --git a/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeRequest.java b/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeRequest.java index 42f55cfd7..589a13520 100644 --- a/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeRequest.java +++ 
b/src/main/java/org/opensearch/ad/transport/ADTaskProfileNodeRequest.java @@ -26,7 +26,7 @@ public ADTaskProfileNodeRequest(StreamInput in) throws IOException { } public ADTaskProfileNodeRequest(ADTaskProfileRequest request) { - this.detectorId = request.getDetectorId(); + this.detectorId = request.getId(); } @Override @@ -35,7 +35,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(detectorId); } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/ADTaskProfileRequest.java b/src/main/java/org/opensearch/ad/transport/ADTaskProfileRequest.java index c2c05ff05..91bfa308e 100644 --- a/src/main/java/org/opensearch/ad/transport/ADTaskProfileRequest.java +++ b/src/main/java/org/opensearch/ad/transport/ADTaskProfileRequest.java @@ -52,7 +52,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(detectorId); } - public String getDetectorId() { + public String getId() { return detectorId; } } diff --git a/src/main/java/org/opensearch/ad/transport/ADTaskProfileTransportAction.java b/src/main/java/org/opensearch/ad/transport/ADTaskProfileTransportAction.java index f4124c460..393973d7b 100644 --- a/src/main/java/org/opensearch/ad/transport/ADTaskProfileTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ADTaskProfileTransportAction.java @@ -80,7 +80,7 @@ protected ADTaskProfileNodeResponse newNodeResponse(StreamInput in) throws IOExc protected ADTaskProfileNodeResponse nodeOperation(ADTaskProfileNodeRequest request) { String remoteNodeId = request.getParentTask().getNodeId(); Version remoteAdVersion = hashRing.getAdVersion(remoteNodeId); - ADTaskProfile adTaskProfile = adTaskManager.getLocalADTaskProfilesByDetectorId(request.getDetectorId()); + ADTaskProfile adTaskProfile = adTaskManager.getLocalADTaskProfilesByDetectorId(request.getId()); return new ADTaskProfileNodeResponse(clusterService.localNode(), adTaskProfile, 
remoteAdVersion); } } diff --git a/src/main/java/org/opensearch/ad/transport/AnomalyResultResponse.java b/src/main/java/org/opensearch/ad/transport/AnomalyResultResponse.java index e64df1677..da66260b4 100644 --- a/src/main/java/org/opensearch/ad/transport/AnomalyResultResponse.java +++ b/src/main/java/org/opensearch/ad/transport/AnomalyResultResponse.java @@ -196,7 +196,7 @@ public Long getRcfTotalUpdates() { return rcfTotalUpdates; } - public Long getDetectorIntervalInMinutes() { + public Long getIntervalInMinutes() { return detectorIntervalInMinutes; } diff --git a/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java index 026d01a89..84a80b912 100644 --- a/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java @@ -73,10 +73,10 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; import org.opensearch.common.io.stream.NotSerializableExceptionWrapper; -import org.opensearch.common.lease.Releasable; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.NetworkExceptionHelper; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.common.lease.Releasable; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.index.IndexNotFoundException; import org.opensearch.node.NodeClosedException; @@ -402,7 +402,7 @@ private ActionListener> onGetDetector( } AnomalyDetector anomalyDetector = detectorOptional.get(); - if (anomalyDetector.isMultientityDetector()) { + if (anomalyDetector.isHighCardinality()) { hcDetectors.add(adID); adStats.getStat(StatNames.AD_HC_EXECUTE_REQUEST_COUNT.getName()).increment(); } @@ -445,7 +445,7 @@ private void executeAnomalyDetection( long dataEndTime ) { // HC logic starts here - if 
(anomalyDetector.isMultientityDetector()) { + if (anomalyDetector.isHighCardinality()) { Optional previousException = stateManager.fetchExceptionAndClear(adID); if (previousException.isPresent()) { Exception exception = previousException.get(); @@ -460,8 +460,7 @@ private void executeAnomalyDetection( } // assume request are in epoch milliseconds - long nextDetectionStartTime = request.getEnd() + (long) (anomalyDetector.getDetectorIntervalInMilliseconds() - * intervalRatioForRequest); + long nextDetectionStartTime = request.getEnd() + (long) (anomalyDetector.getIntervalInMilliseconds() * intervalRatioForRequest); CompositeRetriever compositeRetriever = new CompositeRetriever( dataStartTime, @@ -483,7 +482,7 @@ private void executeAnomalyDetection( try { pageIterator = compositeRetriever.iterator(); } catch (Exception e) { - listener.onFailure(new EndRunException(anomalyDetector.getDetectorId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, false)); + listener.onFailure(new EndRunException(anomalyDetector.getId(), CommonMessages.INVALID_SEARCH_QUERY_MSG, e, false)); return; } @@ -500,13 +499,7 @@ private void executeAnomalyDetection( } else { listener .onResponse( - new AnomalyResultResponse( - new ArrayList(), - null, - null, - anomalyDetector.getDetectorIntervalInMinutes(), - true - ) + new AnomalyResultResponse(new ArrayList(), null, null, anomalyDetector.getIntervalInMinutes(), true) ); } return; @@ -684,7 +677,7 @@ private Exception coldStartIfNoModel(AtomicReference failure, Anomaly } // fetch previous cold start exception - String adID = detector.getDetectorId(); + String adID = detector.getId(); final Optional previousException = stateManager.fetchExceptionAndClear(adID); if (previousException.isPresent()) { Exception exception = previousException.get(); @@ -693,7 +686,7 @@ private Exception coldStartIfNoModel(AtomicReference failure, Anomaly return exception; } } - LOG.info("Trigger cold start for {}", detector.getDetectorId()); + LOG.info("Trigger cold start 
for {}", detector.getId()); coldStart(detector); return previousException.orElse(new InternalFailure(adID, ADCommonMessages.NO_MODEL_ERR_MSG)); } @@ -716,7 +709,7 @@ private void findException(Throwable cause, String adID, AtomicReference actualException = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException((NotSerializableExceptionWrapper) causeException, adID); + .convertWrappedTimeSeriesException((NotSerializableExceptionWrapper) causeException, adID); if (actualException.isPresent()) { TimeSeriesException adException = actualException.get(); failure.set(adException); @@ -814,7 +807,7 @@ public void onResponse(RCFResultResponse response) { featureInResponse, null, response.getTotalUpdates(), - detector.getDetectorIntervalInMinutes(), + detector.getIntervalInMinutes(), false, response.getRelativeIndex(), response.getAttribution(), @@ -975,7 +968,7 @@ private boolean shouldStart( } private void coldStart(AnomalyDetector detector) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // If last cold start is not finished, we don't trigger another one if (stateManager.isColdStartRunning(detectorId)) { @@ -1056,7 +1049,7 @@ private void coldStart(AnomalyDetector detector) { * @return previous cold start exception */ private Optional coldStartIfNoCheckPoint(AnomalyDetector detector) { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); Optional previousException = stateManager.fetchExceptionAndClear(detectorId); diff --git a/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java b/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java index bafab7f80..a834e3503 100644 --- a/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java +++ b/src/main/java/org/opensearch/ad/transport/EntityResultRequest.java @@ -60,7 +60,7 @@ public EntityResultRequest(String detectorId, Map entities, lo this.end = end; } - public String getDetectorId() { + public 
String getId() { return this.detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/EntityResultTransportAction.java b/src/main/java/org/opensearch/ad/transport/EntityResultTransportAction.java index df0280a2e..d788948dc 100644 --- a/src/main/java/org/opensearch/ad/transport/EntityResultTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/EntityResultTransportAction.java @@ -128,12 +128,12 @@ public EntityResultTransportAction( protected void doExecute(Task task, EntityResultRequest request, ActionListener listener) { if (adCircuitBreakerService.isOpen()) { threadPool.executor(AnomalyDetectorPlugin.AD_THREAD_POOL_NAME).execute(() -> cache.get().releaseMemoryForOpenCircuitBreaker()); - listener.onFailure(new LimitExceededException(request.getDetectorId(), CommonMessages.MEMORY_CIRCUIT_BROKEN_ERR_MSG, false)); + listener.onFailure(new LimitExceededException(request.getId(), CommonMessages.MEMORY_CIRCUIT_BROKEN_ERR_MSG, false)); return; } try { - String detectorId = request.getDetectorId(); + String detectorId = request.getId(); Optional previousException = stateManager.fetchExceptionAndClear(detectorId); @@ -183,12 +183,12 @@ private ActionListener> onGetDetector( Entity categoricalValues = entityEntry.getKey(); if (isEntityFromOldNodeMsg(categoricalValues) - && detector.getCategoryField() != null - && detector.getCategoryField().size() == 1) { + && detector.getCategoryFields() != null + && detector.getCategoryFields().size() == 1) { Map attrValues = categoricalValues.getAttributes(); // handle a request from a version before OpenSearch 1.1. 
categoricalValues = Entity - .createSingleAttributeEntity(detector.getCategoryField().get(0), attrValues.get(ADCommonName.EMPTY_FIELD)); + .createSingleAttributeEntity(detector.getCategoryFields().get(0), attrValues.get(ADCommonName.EMPTY_FIELD)); } Optional modelIdOptional = categoricalValues.getModelId(detectorId); @@ -229,11 +229,11 @@ private ActionListener> onGetDetector( resultWriteQueue .put( new ResultWriteRequest( - System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(), + System.currentTimeMillis() + detector.getIntervalInMilliseconds(), detectorId, result.getGrade() > 0 ? RequestPriority.HIGH : RequestPriority.MEDIUM, resultToSave, - detector.getResultIndex() + detector.getCustomResultIndex() ) ); } @@ -245,7 +245,7 @@ private ActionListener> onGetDetector( entityColdStartWorker .put( new EntityFeatureRequest( - System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(), + System.currentTimeMillis() + detector.getIntervalInMilliseconds(), detectorId, RequestPriority.MEDIUM, categoricalValues, @@ -273,7 +273,7 @@ private ActionListener> onGetDetector( hotEntityRequests .add( new EntityFeatureRequest( - System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(), + System.currentTimeMillis() + detector.getIntervalInMilliseconds(), detectorId, // hot entities has MEDIUM priority RequestPriority.MEDIUM, @@ -293,7 +293,7 @@ private ActionListener> onGetDetector( coldEntityRequests .add( new EntityFeatureRequest( - System.currentTimeMillis() + detector.getDetectorIntervalInMilliseconds(), + System.currentTimeMillis() + detector.getIntervalInMilliseconds(), detectorId, // cold entities has LOW priority RequestPriority.LOW, diff --git a/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java b/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java index 8f615cfda..7ebc2f71c 100644 --- a/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java +++ 
b/src/main/java/org/opensearch/ad/transport/ForwardADTaskRequest.java @@ -64,7 +64,7 @@ public ForwardADTaskRequest( Version remoteAdVersion ) { if (remoteAdVersion == null) { - throw new VersionException(detector.getDetectorId(), "Can't forward AD task request to node running null AD version "); + throw new VersionException(detector.getId(), "Can't forward AD task request to node running null AD version "); } this.detector = detector; this.detectionDateRange = detectionDateRange; @@ -155,7 +155,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (detector == null) { validationException = addValidationError(ADCommonMessages.DETECTOR_MISSING, validationException); - } else if (detector.getDetectorId() == null) { + } else if (detector.getId() == null) { validationException = addValidationError(ADCommonMessages.AD_ID_MISSING_MSG, validationException); } if (adTaskAction == null) { diff --git a/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java b/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java index c341f9fa9..be3b45c99 100644 --- a/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java @@ -79,7 +79,7 @@ protected void doExecute(Task task, ForwardADTaskRequest request, ActionListener ADTaskAction adTaskAction = request.getAdTaskAction(); AnomalyDetector detector = request.getDetector(); DateRange detectionDateRange = request.getDetectionDateRange(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); ADTask adTask = request.getAdTask(); User user = request.getUser(); Integer availableTaskSlots = request.getAvailableTaskSLots(); @@ -108,14 +108,14 @@ protected void doExecute(Task task, ForwardADTaskRequest request, ActionListener // Start historical analysis for detector logger.debug("Received START action for 
detector {}", detectorId); adTaskManager.startDetector(detector, detectionDateRange, user, transportService, ActionListener.wrap(r -> { - adTaskCacheManager.setDetectorTaskSlots(detector.getDetectorId(), availableTaskSlots); + adTaskCacheManager.setDetectorTaskSlots(detector.getId(), availableTaskSlots); listener.onResponse(r); }, e -> listener.onFailure(e))); break; case NEXT_ENTITY: logger.debug("Received NEXT_ENTITY action for detector {}, task {}", detectorId, adTask.getTaskId()); // Run next entity for HC detector historical analysis. - if (detector.isMultientityDetector()) { // AD task could be HC detector level task or entity task + if (detector.isHighCardinality()) { // AD task could be HC detector level task or entity task adTaskCacheManager.removeRunningEntity(detectorId, entityValue); if (!adTaskCacheManager.hasEntity(detectorId)) { adTaskCacheManager.setDetectorTaskSlots(detectorId, 0); @@ -157,13 +157,13 @@ protected void doExecute(Task task, ForwardADTaskRequest request, ActionListener if (adTask.isEntityTask()) { // AD task must be entity level task. adTaskCacheManager.removeRunningEntity(detectorId, entityValue); if (adTaskManager.isRetryableError(adTask.getError()) - && !adTaskCacheManager.exceedRetryLimit(adTask.getDetectorId(), adTask.getTaskId())) { + && !adTaskCacheManager.exceedRetryLimit(adTask.getId(), adTask.getTaskId())) { // If retryable exception happens when run entity task, will push back entity to the end // of pending entities queue, then we can retry it later. - adTaskCacheManager.pushBackEntity(adTask.getTaskId(), adTask.getDetectorId(), entityValue); + adTaskCacheManager.pushBackEntity(adTask.getTaskId(), adTask.getId(), entityValue); } else { // If exception is not retryable or exceeds retry limit, will remove this entity. 
- adTaskCacheManager.removeEntity(adTask.getDetectorId(), entityValue); + adTaskCacheManager.removeEntity(adTask.getId(), entityValue); logger.warn("Entity task failed, task id: {}, entity: {}", adTask.getTaskId(), adTask.getEntity().toString()); } if (!adTaskCacheManager.hasEntity(detectorId)) { @@ -193,14 +193,14 @@ protected void doExecute(Task task, ForwardADTaskRequest request, ActionListener adTaskCacheManager.scaleUpDetectorTaskSlots(detectorId, newSlots); } } - listener.onResponse(new AnomalyDetectorJobResponse(detector.getDetectorId(), 0, 0, 0, RestStatus.OK)); + listener.onResponse(new AnomalyDetectorJobResponse(detector.getId(), 0, 0, 0, RestStatus.OK)); break; case CANCEL: logger.debug("Received CANCEL action for detector {}", detectorId); // Cancel HC detector's historical analysis. // Don't support single detector for this action as single entity task will be cancelled directly // on worker node. - if (detector.isMultientityDetector()) { + if (detector.isHighCardinality()) { adTaskCacheManager.clearPendingEntities(detectorId); adTaskCacheManager.removeRunningEntity(detectorId, entityValue); if (!adTaskCacheManager.hasEntity(detectorId) || !adTask.isEntityTask()) { @@ -249,7 +249,7 @@ protected void doExecute(Task task, ForwardADTaskRequest request, ActionListener stateManager.clear(detectorId); featureManager.clear(detectorId); } - listener.onResponse(new AnomalyDetectorJobResponse(detector.getDetectorId(), 0, 0, 0, RestStatus.OK)); + listener.onResponse(new AnomalyDetectorJobResponse(detector.getId(), 0, 0, 0, RestStatus.OK)); break; default: listener.onFailure(new OpenSearchStatusException("Unsupported AD task action " + adTaskAction, RestStatus.BAD_REQUEST)); diff --git a/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java index 24c0800e3..74dac7937 100644 --- 
a/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java @@ -43,7 +43,6 @@ import org.opensearch.action.support.HandledTransportAction; import org.opensearch.ad.AnomalyDetectorProfileRunner; import org.opensearch.ad.EntityProfileRunner; -import org.opensearch.ad.Name; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; @@ -69,6 +68,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.RestStatus; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.Name; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.TransportService; diff --git a/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorRequest.java b/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorRequest.java index 9dae033e5..d7a097bc8 100644 --- a/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorRequest.java +++ b/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorRequest.java @@ -48,7 +48,7 @@ public AnomalyDetector getDetector() { return detector; } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java index 5406d4fe4..0ec2fef48 100644 --- a/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java @@ -102,7 +102,7 @@ protected void doExecute( PreviewAnomalyDetectorRequest request, ActionListener actionListener ) { - String detectorId = request.getDetectorId(); + String detectorId = request.getId(); User user = getUserContext(client); ActionListener listener = 
wrapRestActionListener(actionListener, FAIL_TO_PREVIEW_DETECTOR); try (ThreadContext.StoredContext context = client.threadPool().getThreadContext().stashContext()) { @@ -128,18 +128,18 @@ void previewExecute( ActionListener listener ) { if (adCircuitBreakerService.isOpen()) { - listener.onFailure(new LimitExceededException(request.getDetectorId(), CommonMessages.MEMORY_CIRCUIT_BROKEN_ERR_MSG, false)); + listener.onFailure(new LimitExceededException(request.getId(), CommonMessages.MEMORY_CIRCUIT_BROKEN_ERR_MSG, false)); return; } try { if (!lock.tryAcquire()) { - listener.onFailure(new ClientException(request.getDetectorId(), ADCommonMessages.REQUEST_THROTTLED_MSG)); + listener.onFailure(new ClientException(request.getId(), ADCommonMessages.REQUEST_THROTTLED_MSG)); return; } try { AnomalyDetector detector = request.getDetector(); - String detectorId = request.getDetectorId(); + String detectorId = request.getId(); Instant startTime = request.getStartTime(); Instant endTime = request.getEndTime(); ActionListener releaseListener = ActionListener.runAfter(listener, () -> lock.release()); @@ -190,11 +190,11 @@ public void accept(List anomalyResult) throws Exception { listener.onResponse(response); } }, exception -> { - logger.error("Unexpected error running anomaly detector " + detector.getDetectorId(), exception); + logger.error("Unexpected error running anomaly detector " + detector.getId(), exception); listener .onFailure( new OpenSearchStatusException( - "Unexpected error running anomaly detector " + detector.getDetectorId() + ". " + exception.getMessage(), + "Unexpected error running anomaly detector " + detector.getId() + ". 
" + exception.getMessage(), RestStatus.INTERNAL_SERVER_ERROR ) ); diff --git a/src/main/java/org/opensearch/ad/transport/ProfileNodeRequest.java b/src/main/java/org/opensearch/ad/transport/ProfileNodeRequest.java index 97f77aebe..6ba40fe1a 100644 --- a/src/main/java/org/opensearch/ad/transport/ProfileNodeRequest.java +++ b/src/main/java/org/opensearch/ad/transport/ProfileNodeRequest.java @@ -39,8 +39,8 @@ public ProfileNodeRequest(ProfileRequest request) { this.request = request; } - public String getDetectorId() { - return request.getDetectorId(); + public String getId() { + return request.getId(); } /** diff --git a/src/main/java/org/opensearch/ad/transport/ProfileRequest.java b/src/main/java/org/opensearch/ad/transport/ProfileRequest.java index 091c89e5c..f38b4399c 100644 --- a/src/main/java/org/opensearch/ad/transport/ProfileRequest.java +++ b/src/main/java/org/opensearch/ad/transport/ProfileRequest.java @@ -74,7 +74,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(forMultiEntityDetector); } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/ProfileTransportAction.java b/src/main/java/org/opensearch/ad/transport/ProfileTransportAction.java index 9ebff788c..34f0eef87 100644 --- a/src/main/java/org/opensearch/ad/transport/ProfileTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ProfileTransportAction.java @@ -104,7 +104,7 @@ protected ProfileNodeResponse newNodeResponse(StreamInput in) throws IOException @Override protected ProfileNodeResponse nodeOperation(ProfileNodeRequest request) { - String detectorId = request.getDetectorId(); + String detectorId = request.getId(); Set profiles = request.getProfilesToBeRetrieved(); int shingleSize = -1; long activeEntity = 0; diff --git a/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequest.java 
b/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequest.java index d8309b3dd..509f65ebb 100644 --- a/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequest.java +++ b/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequest.java @@ -80,7 +80,7 @@ public SearchTopAnomalyResultRequest( this.endTime = endTime; } - public String getDetectorId() { + public String getId() { return detectorId; } diff --git a/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportAction.java b/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportAction.java index 1c22f316e..4cc997391 100644 --- a/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportAction.java @@ -220,7 +220,7 @@ public SearchTopAnomalyResultTransportAction( protected void doExecute(Task task, SearchTopAnomalyResultRequest request, ActionListener listener) { GetAnomalyDetectorRequest getAdRequest = new GetAnomalyDetectorRequest( - request.getDetectorId(), + request.getId(), // The default version value used in org.opensearch.rest.action.RestActions.parseVersion() -3L, false, @@ -233,16 +233,14 @@ protected void doExecute(Task task, SearchTopAnomalyResultRequest request, Actio client.execute(GetAnomalyDetectorAction.INSTANCE, getAdRequest, ActionListener.wrap(getAdResponse -> { // Make sure detector exists if (getAdResponse.getDetector() == null) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, "No anomaly detector found with ID %s", request.getDetectorId()) - ); + throw new IllegalArgumentException(String.format(Locale.ROOT, "No anomaly detector found with ID %s", request.getId())); } // Make sure detector is HC - List categoryFieldsFromResponse = getAdResponse.getDetector().getCategoryField(); + List categoryFieldsFromResponse = getAdResponse.getDetector().getCategoryFields(); if (categoryFieldsFromResponse 
== null || categoryFieldsFromResponse.isEmpty()) { throw new IllegalArgumentException( - String.format(Locale.ROOT, "No category fields found for detector ID %s", request.getDetectorId()) + String.format(Locale.ROOT, "No category fields found for detector ID %s", request.getId()) ); } @@ -254,13 +252,7 @@ protected void doExecute(Task task, SearchTopAnomalyResultRequest request, Actio for (String categoryField : request.getCategoryFields()) { if (!categoryFieldsFromResponse.contains(categoryField)) { throw new IllegalArgumentException( - String - .format( - Locale.ROOT, - "Category field %s doesn't exist for detector ID %s", - categoryField, - request.getDetectorId() - ) + String.format(Locale.ROOT, "Category field %s doesn't exist for detector ID %s", categoryField, request.getId()) ); } } @@ -272,7 +264,7 @@ protected void doExecute(Task task, SearchTopAnomalyResultRequest request, Actio ADTask historicalTask = getAdResponse.getHistoricalAdTask(); if (historicalTask == null) { throw new ResourceNotFoundException( - String.format(Locale.ROOT, "No historical tasks found for detector ID %s", request.getDetectorId()) + String.format(Locale.ROOT, "No historical tasks found for detector ID %s", request.getId()) ); } if (Strings.isNullOrEmpty(request.getTaskId())) { @@ -310,7 +302,7 @@ protected void doExecute(Task task, SearchTopAnomalyResultRequest request, Actio SearchRequest searchRequest = generateSearchRequest(request); // Adding search over any custom result indices - String rawCustomResultIndex = getAdResponse.getDetector().getResultIndex(); + String rawCustomResultIndex = getAdResponse.getDetector().getCustomResultIndex(); String customResultIndex = rawCustomResultIndex == null ? 
null : rawCustomResultIndex.trim(); if (!Strings.isNullOrEmpty(customResultIndex)) { searchRequest.indices(defaultIndex, customResultIndex); @@ -497,7 +489,7 @@ private QueryBuilder generateQuery(SearchTopAnomalyResultRequest request) { TermQueryBuilder taskIdFilter = QueryBuilders.termQuery(AnomalyResult.TASK_ID_FIELD, request.getTaskId()); query.filter(taskIdFilter); } else { - TermQueryBuilder detectorIdFilter = QueryBuilders.termQuery(AnomalyResult.DETECTOR_ID_FIELD, request.getDetectorId()); + TermQueryBuilder detectorIdFilter = QueryBuilders.termQuery(AnomalyResult.DETECTOR_ID_FIELD, request.getId()); ExistsQueryBuilder taskIdExistsFilter = QueryBuilders.existsQuery(AnomalyResult.TASK_ID_FIELD); query.filter(detectorIdFilter).mustNot(taskIdExistsFilter); } diff --git a/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java b/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java index 833d8ba85..af0789ce9 100644 --- a/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java +++ b/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java @@ -68,7 +68,7 @@ public void bulkIndexAnomalyResult(String resultIndex, List anoma listener.onResponse(null); return; } - String detectorId = anomalyResults.get(0).getDetectorId(); + String detectorId = anomalyResults.get(0).getId(); try { if (resultIndex != null) { // Only create custom AD result index when create detector, won’t recreate custom AD result index in realtime diff --git a/src/main/java/org/opensearch/ad/util/ClientUtil.java b/src/main/java/org/opensearch/ad/util/ClientUtil.java index b77735c6a..d85d4fdf7 100644 --- a/src/main/java/org/opensearch/ad/util/ClientUtil.java +++ b/src/main/java/org/opensearch/ad/util/ClientUtil.java @@ -194,11 +194,11 @@ public Optional ) { try { - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); if 
(!throttler.insertFilteredQuery(detectorId, request)) { LOG.info("There is one query running for detectorId: {}. Trying to cancel the long running query", detectorId); cancelRunningQuery(client, detectorId, LOG); - throw new InternalFailure(detector.getDetectorId(), "There is already a query running on AnomalyDetector"); + throw new InternalFailure(detector.getId(), "There is already a query running on AnomalyDetector"); } AtomicReference respReference = new AtomicReference<>(); final CountDownLatch latch = new CountDownLatch(1); @@ -237,7 +237,7 @@ public Optional * @return true if given detector has a running query else false */ public boolean hasRunningQuery(AnomalyDetector detector) { - return throttler.getFilteredQuery(detector.getDetectorId()).isPresent(); + return throttler.getFilteredQuery(detector.getId()).isPresent(); } /** diff --git a/src/main/java/org/opensearch/forecast/model/Forecaster.java b/src/main/java/org/opensearch/forecast/model/Forecaster.java new file mode 100644 index 000000000..cd17bf573 --- /dev/null +++ b/src/main/java/org/opensearch/forecast/model/Forecaster.java @@ -0,0 +1,405 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.forecast.model; + +import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.opensearch.forecast.constant.ForecastCommonName.CUSTOM_RESULT_INDEX_PREFIX; +import static org.opensearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.opensearch.common.ParsingException; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.ParseField; +import 
org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParseException; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.forecast.constant.ForecastCommonMessages; +import org.opensearch.forecast.settings.ForecastNumericSetting; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.timeseries.common.exception.ValidationException; +import org.opensearch.timeseries.constant.CommonMessages; +import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.constant.CommonValue; +import org.opensearch.timeseries.dataprocessor.ImputationOption; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.model.Feature; +import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.TimeConfiguration; +import org.opensearch.timeseries.model.ValidationAspect; +import org.opensearch.timeseries.model.ValidationIssueType; +import org.opensearch.timeseries.settings.TimeSeriesSettings; +import org.opensearch.timeseries.util.ParseUtils; + +import com.google.common.base.Objects; + +/** + * Similar to AnomalyDetector, Forecaster defines config object. We cannot inherit from + * AnomalyDetector as AnomalyDetector uses detection interval but Forecaster doesn't + * need it and has to set it to null. Detection interval being null would fail + * AnomalyDetector's constructor because detection interval cannot be null. 
+ */ +public class Forecaster extends Config { + public static final String FORECAST_PARSE_FIELD_NAME = "Forecaster"; + public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = new NamedXContentRegistry.Entry( + Forecaster.class, + new ParseField(FORECAST_PARSE_FIELD_NAME), + it -> parse(it) + ); + + public static final String HORIZON_FIELD = "horizon"; + public static final String FORECAST_INTERVAL_FIELD = "forecast_interval"; + public static final int DEFAULT_HORIZON_SHINGLE_RATIO = 3; + + private Integer horizon; + + public Forecaster( + String forecasterId, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration forecastInterval, + TimeConfiguration windowDelay, + Integer shingleSize, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime, + List categoryFields, + User user, + String resultIndex, + Integer horizon, + ImputationOption imputationOption + ) { + super( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + forecastInterval, + imputationOption + ); + + checkAndThrowValidationErrors(ValidationAspect.FORECASTER); + + if (forecastInterval == null) { + errorMessage = ForecastCommonMessages.NULL_FORECAST_INTERVAL; + issueType = ValidationIssueType.FORECAST_INTERVAL; + } else if (((IntervalTimeConfiguration) forecastInterval).getInterval() <= 0) { + errorMessage = ForecastCommonMessages.INVALID_FORECAST_INTERVAL; + issueType = ValidationIssueType.FORECAST_INTERVAL; + } + + int maxCategoryFields = ForecastNumericSetting.maxCategoricalFields(); + if (categoryFields != null && categoryFields.size() > maxCategoryFields) { + errorMessage = CommonMessages.getTooManyCategoricalFieldErr(maxCategoryFields); + issueType = ValidationIssueType.CATEGORY; + } + + if 
(invalidHorizon(horizon)) { + errorMessage = "Horizon size must be a positive integer no larger than " + + TimeSeriesSettings.MAX_SHINGLE_SIZE * DEFAULT_HORIZON_SHINGLE_RATIO + + ". Got " + + horizon; + issueType = ValidationIssueType.SHINGLE_SIZE_FIELD; + } + + checkAndThrowValidationErrors(ValidationAspect.FORECASTER); + + this.horizon = horizon; + } + + public Forecaster(StreamInput input) throws IOException { + super(input); + horizon = input.readInt(); + } + + @Override + public void writeTo(StreamOutput output) throws IOException { + super.writeTo(output); + output.writeInt(horizon); + } + + public boolean invalidHorizon(Integer horizonToTest) { + return horizonToTest != null + && (horizonToTest < 1 || horizonToTest > TimeSeriesSettings.MAX_SHINGLE_SIZE * DEFAULT_HORIZON_SHINGLE_RATIO); + } + + /** + * Parse raw json content into forecaster instance. + * + * @param parser json based content parser + * @return forecaster instance + * @throws IOException IOException if content can't be parsed correctly + */ + public static Forecaster parse(XContentParser parser) throws IOException { + return parse(parser, null); + } + + public static Forecaster parse(XContentParser parser, String forecasterId) throws IOException { + return parse(parser, forecasterId, null); + } + + /** + * Parse raw json content and given forecaster id into forecaster instance. + * + * @param parser json based content parser + * @param forecasterId forecaster id + * @param version forecaster document version + * @return forecaster instance + * @throws IOException IOException if content can't be parsed correctly + */ + public static Forecaster parse(XContentParser parser, String forecasterId, Long version) throws IOException { + return parse(parser, forecasterId, version, null, null); + } + + /** + * Parse raw json content and given forecaster id into forecaster instance. 
+ * + * @param parser json based content parser + * @param forecasterId forecaster id + * @param version forecast document version + * @param defaultForecastInterval default forecaster interval + * @param defaultForecastWindowDelay default forecaster window delay + * @return forecaster instance + * @throws IOException IOException if content can't be parsed correctly + */ + public static Forecaster parse( + XContentParser parser, + String forecasterId, + Long version, + TimeValue defaultForecastInterval, + TimeValue defaultForecastWindowDelay + ) throws IOException { + String name = null; + String description = ""; + String timeField = null; + List indices = new ArrayList(); + QueryBuilder filterQuery = QueryBuilders.matchAllQuery(); + TimeConfiguration forecastInterval = defaultForecastInterval == null + ? null + : new IntervalTimeConfiguration(defaultForecastInterval.getMinutes(), ChronoUnit.MINUTES); + TimeConfiguration windowDelay = defaultForecastWindowDelay == null + ? null + : new IntervalTimeConfiguration(defaultForecastWindowDelay.getSeconds(), ChronoUnit.SECONDS); + Integer shingleSize = null; + List features = new ArrayList<>(); + Integer schemaVersion = CommonValue.NO_SCHEMA_VERSION; + Map uiMetadata = null; + Instant lastUpdateTime = null; + User user = null; + String resultIndex = null; + + List categoryField = null; + Integer horizon = null; + ImputationOption interpolationOption = null; + + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = parser.currentName(); + parser.nextToken(); + + switch (fieldName) { + case NAME_FIELD: + name = parser.text(); + break; + case DESCRIPTION_FIELD: + description = parser.text(); + break; + case TIMEFIELD_FIELD: + timeField = parser.text(); + break; + case INDICES_FIELD: + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + while (parser.nextToken() != 
XContentParser.Token.END_ARRAY) { + indices.add(parser.text()); + } + break; + case UI_METADATA_FIELD: + uiMetadata = parser.map(); + break; + case CommonName.SCHEMA_VERSION_FIELD: + schemaVersion = parser.intValue(); + break; + case FILTER_QUERY_FIELD: + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); + try { + filterQuery = parseInnerQueryBuilder(parser); + } catch (ParsingException | XContentParseException e) { + throw new ValidationException( + "Custom query error in data filter: " + e.getMessage(), + ValidationIssueType.FILTER_QUERY, + ValidationAspect.FORECASTER + ); + } catch (IllegalArgumentException e) { + if (!e.getMessage().contains("empty clause")) { + throw e; + } + } + break; + case FORECAST_INTERVAL_FIELD: + try { + forecastInterval = TimeConfiguration.parse(parser); + } catch (Exception e) { + if (e instanceof IllegalArgumentException && e.getMessage().contains(CommonMessages.NEGATIVE_TIME_CONFIGURATION)) { + throw new ValidationException( + "Forecasting interval must be a positive integer", + ValidationIssueType.FORECAST_INTERVAL, + ValidationAspect.FORECASTER + ); + } + throw e; + } + break; + case FEATURE_ATTRIBUTES_FIELD: + try { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + features.add(Feature.parse(parser)); + } + } catch (Exception e) { + if (e instanceof ParsingException || e instanceof XContentParseException) { + throw new ValidationException( + "Custom query error: " + e.getMessage(), + ValidationIssueType.FEATURE_ATTRIBUTES, + ValidationAspect.FORECASTER + ); + } + throw e; + } + break; + case WINDOW_DELAY_FIELD: + try { + windowDelay = TimeConfiguration.parse(parser); + } catch (Exception e) { + if (e instanceof IllegalArgumentException && e.getMessage().contains(CommonMessages.NEGATIVE_TIME_CONFIGURATION)) { + throw new ValidationException( + "Window delay interval must be a positive 
integer", + ValidationIssueType.WINDOW_DELAY, + ValidationAspect.FORECASTER + ); + } + throw e; + } + break; + case SHINGLE_SIZE_FIELD: + shingleSize = parser.intValue(); + break; + case LAST_UPDATE_TIME_FIELD: + lastUpdateTime = ParseUtils.toInstant(parser); + break; + case CATEGORY_FIELD: + categoryField = (List) parser.list(); + break; + case USER_FIELD: + user = User.parse(parser); + break; + case RESULT_INDEX_FIELD: + resultIndex = parser.text(); + break; + case HORIZON_FIELD: + horizon = parser.intValue(); + break; + case IMPUTATION_OPTION_FIELD: + interpolationOption = ImputationOption.parse(parser); + break; + default: + parser.skipChildren(); + break; + } + } + Forecaster forecaster = new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + getShingleSize(shingleSize), + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryField, + user, + resultIndex, + horizon, + interpolationOption + ); + return forecaster; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + XContentBuilder xContentBuilder = builder.startObject(); + xContentBuilder = super.toXContent(xContentBuilder, params); + xContentBuilder.field(FORECAST_INTERVAL_FIELD, interval).field(HORIZON_FIELD, horizon); + + return xContentBuilder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Forecaster forecaster = (Forecaster) o; + return super.equals(o) && Objects.equal(horizon, forecaster.horizon); + } + + @Override + public int hashCode() { + int hash = super.hashCode(); + hash = 89 * hash + (this.horizon != null ? 
this.horizon.hashCode() : 0); + return hash; + } + + @Override + public String validateCustomResultIndex(String resultIndex) { + if (resultIndex != null && !resultIndex.startsWith(CUSTOM_RESULT_INDEX_PREFIX)) { + return ForecastCommonMessages.INVALID_RESULT_INDEX_PREFIX; + } + return super.validateCustomResultIndex(resultIndex); + } + + @Override + protected ValidationAspect getConfigValidationAspect() { + return ValidationAspect.FORECASTER; + } + + public Integer getHorizon() { + return horizon; + } +} diff --git a/src/main/java/org/opensearch/ad/Name.java b/src/main/java/org/opensearch/timeseries/Name.java similarity index 95% rename from src/main/java/org/opensearch/ad/Name.java rename to src/main/java/org/opensearch/timeseries/Name.java index 4b1915d7e..d53a2a33a 100644 --- a/src/main/java/org/opensearch/ad/Name.java +++ b/src/main/java/org/opensearch/timeseries/Name.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad; +package org.opensearch.timeseries; import java.util.Collection; import java.util.HashSet; diff --git a/src/main/java/org/opensearch/timeseries/common/exception/ClientException.java b/src/main/java/org/opensearch/timeseries/common/exception/ClientException.java index 98c99821e..d8be97f37 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/ClientException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/ClientException.java @@ -12,7 +12,7 @@ package org.opensearch.timeseries.common.exception; /** - * All exception visible to AD transport layer's client is under ClientException. + * All exception visible to transport layer's client is under ClientException. 
*/ public class ClientException extends TimeSeriesException { @@ -20,15 +20,15 @@ public ClientException(String message) { super(message); } - public ClientException(String anomalyDetectorId, String message) { - super(anomalyDetectorId, message); + public ClientException(String configId, String message) { + super(configId, message); } - public ClientException(String anomalyDetectorId, String message, Throwable throwable) { - super(anomalyDetectorId, message, throwable); + public ClientException(String configId, String message, Throwable throwable) { + super(configId, message, throwable); } - public ClientException(String anomalyDetectorId, Throwable cause) { - super(anomalyDetectorId, cause); + public ClientException(String configId, Throwable cause) { + super(configId, cause); } } diff --git a/src/main/java/org/opensearch/timeseries/common/exception/EndRunException.java b/src/main/java/org/opensearch/timeseries/common/exception/EndRunException.java index 0babf52e0..a4b11c621 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/EndRunException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/EndRunException.java @@ -23,13 +23,13 @@ public EndRunException(String message, boolean endNow) { this.endNow = endNow; } - public EndRunException(String anomalyDetectorId, String message, boolean endNow) { - super(anomalyDetectorId, message); + public EndRunException(String configId, String message, boolean endNow) { + super(configId, message); this.endNow = endNow; } - public EndRunException(String anomalyDetectorId, String message, Throwable throwable, boolean endNow) { - super(anomalyDetectorId, message, throwable); + public EndRunException(String configId, String message, Throwable throwable, boolean endNow) { + super(configId, message, throwable); this.endNow = endNow; } diff --git a/src/main/java/org/opensearch/timeseries/common/exception/InternalFailure.java 
b/src/main/java/org/opensearch/timeseries/common/exception/InternalFailure.java index 04ab80a12..c7c9048cb 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/InternalFailure.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/InternalFailure.java @@ -17,19 +17,19 @@ */ public class InternalFailure extends ClientException { - public InternalFailure(String anomalyDetectorId, String message) { - super(anomalyDetectorId, message); + public InternalFailure(String configId, String message) { + super(configId, message); } - public InternalFailure(String anomalyDetectorId, String message, Throwable cause) { - super(anomalyDetectorId, message, cause); + public InternalFailure(String configId, String message, Throwable cause) { + super(configId, message, cause); } - public InternalFailure(String anomalyDetectorId, Throwable cause) { - super(anomalyDetectorId, cause); + public InternalFailure(String configId, Throwable cause) { + super(configId, cause); } public InternalFailure(TimeSeriesException cause) { - super(cause.getAnomalyDetectorId(), cause); + super(cause.getConfigId(), cause); } } diff --git a/src/main/java/org/opensearch/timeseries/common/exception/LimitExceededException.java b/src/main/java/org/opensearch/timeseries/common/exception/LimitExceededException.java index c06258fad..e51a2bc4e 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/LimitExceededException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/LimitExceededException.java @@ -17,13 +17,13 @@ public class LimitExceededException extends EndRunException { /** - * Constructor with an anomaly detector ID and an explanation. + * Constructor with a config ID and an explanation. 
* - * @param anomalyDetectorId ID of the anomaly detector for which the limit is exceeded + * @param id ID of the time series analysis for which the limit is exceeded * @param message explanation for the limit */ - public LimitExceededException(String anomalyDetectorId, String message) { - super(anomalyDetectorId, message, true); + public LimitExceededException(String id, String message) { + super(id, message, true); this.countedInStats(false); } @@ -47,14 +47,14 @@ public LimitExceededException(String message, boolean endRun) { } /** - * Constructor with an anomaly detector ID and an explanation, and a flag for stopping. + * Constructor with a config ID and an explanation, and a flag for stopping. * - * @param anomalyDetectorId ID of the anomaly detector for which the limit is exceeded + * @param id ID of the time series analysis for which the limit is exceeded * @param message explanation for the limit - * @param stopNow whether to stop detector immediately + * @param stopNow whether to stop time series analysis immediately */ - public LimitExceededException(String anomalyDetectorId, String message, boolean stopNow) { - super(anomalyDetectorId, message, stopNow); + public LimitExceededException(String id, String message, boolean stopNow) { + super(id, message, stopNow); this.countedInStats(false); } } diff --git a/src/main/java/org/opensearch/timeseries/common/exception/NotSerializedExceptionName.java b/src/main/java/org/opensearch/timeseries/common/exception/NotSerializedExceptionName.java index d9e279068..0cd2212be 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/NotSerializedExceptionName.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/NotSerializedExceptionName.java @@ -32,7 +32,7 @@ public enum NotSerializedExceptionName { RESOURCE_NOT_FOUND_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new ResourceNotFoundException("", ""))), 
LIMIT_EXCEEDED_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new LimitExceededException("", "", false))), END_RUN_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new EndRunException("", "", false))), - ANOMALY_DETECTION_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new TimeSeriesException("", ""))), + TIME_SERIES_DETECTION_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new TimeSeriesException("", ""))), INTERNAL_FAILURE_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new InternalFailure("", ""))), CLIENT_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new ClientException("", ""))), CANCELLATION_EXCEPTION_NAME_UNDERSCORE(BaseExceptionsHelper.getExceptionName(new TaskCancelledException("", ""))), @@ -52,40 +52,40 @@ public String getName() { } /** - * Convert from a NotSerializableExceptionWrapper to an AnomalyDetectionException. + * Convert from a NotSerializableExceptionWrapper to a TimeSeriesException. * Since NotSerializableExceptionWrapper does not keep some details we need, we * initialize the exception with default values. * @param exception an NotSerializableExceptionWrapper exception. - * @param adID Detector Id. - * @return converted AnomalyDetectionException + * @param configID Config Id. 
+ * @return converted TimeSeriesException */ - public static Optional convertWrappedAnomalyDetectionException( + public static Optional convertWrappedTimeSeriesException( NotSerializableExceptionWrapper exception, - String adID + String configID ) { String exceptionMsg = exception.getMessage().trim(); TimeSeriesException convertedException = null; - for (NotSerializedExceptionName adException : values()) { - if (exceptionMsg.startsWith(adException.getName())) { - switch (adException) { + for (NotSerializedExceptionName timeseriesException : values()) { + if (exceptionMsg.startsWith(timeseriesException.getName())) { + switch (timeseriesException) { case RESOURCE_NOT_FOUND_EXCEPTION_NAME_UNDERSCORE: - convertedException = new ResourceNotFoundException(adID, exceptionMsg); + convertedException = new ResourceNotFoundException(configID, exceptionMsg); break; case LIMIT_EXCEEDED_EXCEPTION_NAME_UNDERSCORE: - convertedException = new LimitExceededException(adID, exceptionMsg, false); + convertedException = new LimitExceededException(configID, exceptionMsg, false); break; case END_RUN_EXCEPTION_NAME_UNDERSCORE: - convertedException = new EndRunException(adID, exceptionMsg, false); + convertedException = new EndRunException(configID, exceptionMsg, false); break; - case ANOMALY_DETECTION_EXCEPTION_NAME_UNDERSCORE: - convertedException = new TimeSeriesException(adID, exceptionMsg); + case TIME_SERIES_DETECTION_EXCEPTION_NAME_UNDERSCORE: + convertedException = new TimeSeriesException(configID, exceptionMsg); break; case INTERNAL_FAILURE_NAME_UNDERSCORE: - convertedException = new InternalFailure(adID, exceptionMsg); + convertedException = new InternalFailure(configID, exceptionMsg); break; case CLIENT_EXCEPTION_NAME_UNDERSCORE: - convertedException = new ClientException(adID, exceptionMsg); + convertedException = new ClientException(configID, exceptionMsg); break; case CANCELLATION_EXCEPTION_NAME_UNDERSCORE: convertedException = new TaskCancelledException(exceptionMsg, ""); @@ 
-100,7 +100,7 @@ public static Optional convertWrappedAnomalyDetectionExcept convertedException = new ValidationException(exceptionMsg, null, null); break; default: - LOG.warn(new ParameterizedMessage("Unexpected AD exception {}", adException)); + LOG.warn(new ParameterizedMessage("Unexpected exception {}", timeseriesException)); break; } } diff --git a/src/main/java/org/opensearch/timeseries/common/exception/ResourceNotFoundException.java b/src/main/java/org/opensearch/timeseries/common/exception/ResourceNotFoundException.java index 061060975..eddbcac99 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/ResourceNotFoundException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/ResourceNotFoundException.java @@ -17,13 +17,13 @@ public class ResourceNotFoundException extends TimeSeriesException { /** - * Constructor with an anomaly detector ID and a message. + * Constructor with a config ID and a message. * - * @param detectorId ID of the detector related to the resource + * @param configId ID of the config related to the resource * @param message explains which resource is not found */ - public ResourceNotFoundException(String detectorId, String message) { - super(detectorId, message); + public ResourceNotFoundException(String configId, String message) { + super(configId, message); countedInStats(false); } diff --git a/src/main/java/org/opensearch/timeseries/common/exception/TimeSeriesException.java b/src/main/java/org/opensearch/timeseries/common/exception/TimeSeriesException.java index 879947fcf..caa2573a9 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/TimeSeriesException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/TimeSeriesException.java @@ -12,11 +12,11 @@ package org.opensearch.timeseries.common.exception; /** - * Base exception for exceptions thrown from Anomaly Detection. + * Base exception for exceptions thrown. 
*/ public class TimeSeriesException extends RuntimeException { - private String anomalyDetectorId; + private String configId; // countedInStats will be used to tell whether the exception should be // counted in failure stats. private boolean countedInStats = true; @@ -26,37 +26,37 @@ public TimeSeriesException(String message) { } /** - * Constructor with an anomaly detector ID and a message. + * Constructor with a config ID and a message. * - * @param anomalyDetectorId anomaly detector ID + * @param configId config ID * @param message message of the exception */ - public TimeSeriesException(String anomalyDetectorId, String message) { + public TimeSeriesException(String configId, String message) { super(message); - this.anomalyDetectorId = anomalyDetectorId; + this.configId = configId; } - public TimeSeriesException(String adID, String message, Throwable cause) { + public TimeSeriesException(String configID, String message, Throwable cause) { super(message, cause); - this.anomalyDetectorId = adID; + this.configId = configID; } public TimeSeriesException(Throwable cause) { super(cause); } - public TimeSeriesException(String adID, Throwable cause) { + public TimeSeriesException(String configID, Throwable cause) { super(cause); - this.anomalyDetectorId = adID; + this.configId = configID; } /** - * Returns the ID of the anomaly detector. + * Returns the ID of the analysis config. 
* - * @return anomaly detector ID + * @return config ID */ - public String getAnomalyDetectorId() { - return this.anomalyDetectorId; + public String getConfigId() { + return this.configId; } /** @@ -82,8 +82,7 @@ public TimeSeriesException countedInStats(boolean countInStats) { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append("Anomaly Detector "); - sb.append(anomalyDetectorId); + sb.append(configId); sb.append(' '); sb.append(super.toString()); return sb.toString(); diff --git a/src/main/java/org/opensearch/timeseries/common/exception/ValidationException.java b/src/main/java/org/opensearch/timeseries/common/exception/ValidationException.java index bf3c20aa2..4c18c13fe 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/ValidationException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/ValidationException.java @@ -11,7 +11,7 @@ package org.opensearch.timeseries.common.exception; -import org.opensearch.ad.model.AnomalyDetector; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.model.ValidationAspect; import org.opensearch.timeseries.model.ValidationIssueType; @@ -53,7 +53,7 @@ public ValidationException( ValidationAspect aspect, IntervalTimeConfiguration intervalSuggestion ) { - super(AnomalyDetector.NO_ID, message, cause); + super(Config.NO_ID, message, cause); this.type = type; this.aspect = aspect; this.intervalSuggestion = intervalSuggestion; diff --git a/src/main/java/org/opensearch/timeseries/common/exception/VersionException.java b/src/main/java/org/opensearch/timeseries/common/exception/VersionException.java index 3c780e9b4..b9fac314c 100644 --- a/src/main/java/org/opensearch/timeseries/common/exception/VersionException.java +++ b/src/main/java/org/opensearch/timeseries/common/exception/VersionException.java @@ -20,7 +20,7 @@ public VersionException(String message) { super(message); } 
- public VersionException(String anomalyDetectorId, String message) { - super(anomalyDetectorId, message); + public VersionException(String configId, String message) { + super(configId, message); } } diff --git a/src/main/java/org/opensearch/timeseries/constant/CommonValue.java b/src/main/java/org/opensearch/timeseries/constant/CommonValue.java new file mode 100644 index 000000000..6f05f59d0 --- /dev/null +++ b/src/main/java/org/opensearch/timeseries/constant/CommonValue.java @@ -0,0 +1,12 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.timeseries.constant; + +public class CommonValue { + // unknown or no schema version + public static Integer NO_SCHEMA_VERSION = 0; + +} diff --git a/src/main/java/org/opensearch/timeseries/dataprocessor/ImputationOption.java b/src/main/java/org/opensearch/timeseries/dataprocessor/ImputationOption.java index 9943171e5..e073d1316 100644 --- a/src/main/java/org/opensearch/timeseries/dataprocessor/ImputationOption.java +++ b/src/main/java/org/opensearch/timeseries/dataprocessor/ImputationOption.java @@ -26,17 +26,20 @@ public class ImputationOption implements Writeable, ToXContent { // field name in toXContent public static final String METHOD_FIELD = "method"; public static final String DEFAULT_FILL_FIELD = "defaultFill"; + public static final String INTEGER_SENSITIVE_FIELD = "integerSensitive"; private final ImputationMethod method; private final Optional defaultFill; + private final boolean integerSentive; - public ImputationOption(ImputationMethod method, Optional defaultFill) { + public ImputationOption(ImputationMethod method, Optional defaultFill, boolean integerSentive) { this.method = method; this.defaultFill = defaultFill; + this.integerSentive = integerSentive; } public ImputationOption(ImputationMethod method) { - this(method, Optional.empty()); + this(method, Optional.empty(), false); } public ImputationOption(StreamInput in) throws IOException { @@ -46,6 
+49,7 @@ public ImputationOption(StreamInput in) throws IOException { } else { this.defaultFill = Optional.empty(); } + this.integerSentive = in.readBoolean(); } @Override @@ -57,11 +61,13 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(true); out.writeDoubleArray(defaultFill.get()); } + out.writeBoolean(integerSentive); } public static ImputationOption parse(XContentParser parser) throws IOException { ImputationMethod method = ImputationMethod.ZERO; - List defaultFill = new ArrayList<>(); + List defaultFill = null; + Boolean integerSensitive = null; ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { @@ -73,15 +79,23 @@ public static ImputationOption parse(XContentParser parser) throws IOException { break; case DEFAULT_FILL_FIELD: ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); + defaultFill = new ArrayList<>(); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { defaultFill.add(parser.doubleValue()); } break; + case INTEGER_SENSITIVE_FIELD: + integerSensitive = parser.booleanValue(); + break; default: break; } } - return new ImputationOption(method, Optional.of(defaultFill.stream().mapToDouble(Double::doubleValue).toArray())); + return new ImputationOption( + method, + Optional.ofNullable(defaultFill).map(list -> list.stream().mapToDouble(Double::doubleValue).toArray()), + integerSensitive + ); } public XContentBuilder toXContent(XContentBuilder builder) throws IOException { @@ -92,13 +106,13 @@ public XContentBuilder toXContent(XContentBuilder builder) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { XContentBuilder xContentBuilder = builder.startObject(); - xContentBuilder.field(METHOD_FIELD, method); + builder.field(METHOD_FIELD, method); if (!defaultFill.isEmpty()) { builder.array(DEFAULT_FILL_FIELD, 
defaultFill.get()); } - builder.endObject(); - return builder; + builder.field(INTEGER_SENSITIVE_FIELD, integerSentive); + return xContentBuilder.endObject(); } @Override @@ -110,12 +124,13 @@ public boolean equals(Object o) { ImputationOption other = (ImputationOption) o; return method == other.method - && (defaultFill.isEmpty() ? other.defaultFill.isEmpty() : Arrays.equals(defaultFill.get(), other.defaultFill.get())); + && (defaultFill.isEmpty() ? other.defaultFill.isEmpty() : Arrays.equals(defaultFill.get(), other.defaultFill.get())) + && integerSentive == other.integerSentive; } @Override public int hashCode() { - return Objects.hash(method, (defaultFill.isEmpty() ? 0 : Arrays.hashCode(defaultFill.get()))); + return Objects.hash(method, (defaultFill.isEmpty() ? 0 : Arrays.hashCode(defaultFill.get())), integerSentive); } public ImputationMethod getMethod() { @@ -125,4 +140,8 @@ public ImputationMethod getMethod() { public Optional getDefaultFill() { return defaultFill; } + + public boolean isIntegerSentive() { + return integerSentive; + } } diff --git a/src/main/java/org/opensearch/timeseries/model/Config.java b/src/main/java/org/opensearch/timeseries/model/Config.java new file mode 100644 index 000000000..ae32e59d3 --- /dev/null +++ b/src/main/java/org/opensearch/timeseries/model/Config.java @@ -0,0 +1,562 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.timeseries.model; + +import static org.opensearch.timeseries.constant.CommonMessages.INVALID_CHAR_IN_RESULT_INDEX_NAME; + +import java.io.IOException; +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.util.Strings; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import 
org.opensearch.common.io.stream.Writeable; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.ToXContentObject; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.timeseries.annotation.Generated; +import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.common.exception.ValidationException; +import org.opensearch.timeseries.constant.CommonMessages; +import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.dataprocessor.FixedValueImputer; +import org.opensearch.timeseries.dataprocessor.ImputationMethod; +import org.opensearch.timeseries.dataprocessor.ImputationOption; +import org.opensearch.timeseries.dataprocessor.Imputer; +import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; +import org.opensearch.timeseries.dataprocessor.PreviousValueImputer; +import org.opensearch.timeseries.dataprocessor.ZeroImputer; +import org.opensearch.timeseries.settings.TimeSeriesSettings; + +import com.google.common.base.Objects; +import com.google.common.collect.ImmutableList; + +public abstract class Config implements Writeable, ToXContentObject { + private static final Logger logger = LogManager.getLogger(Config.class); + + public static final int MAX_RESULT_INDEX_NAME_SIZE = 255; + // OS doesn’t allow uppercase: https://tinyurl.com/yse2xdbx + public static final String RESULT_INDEX_NAME_PATTERN = "[a-z0-9_-]+"; + + public static final String NO_ID = ""; + public static final String TIMEOUT = "timeout"; + public static final String GENERAL_SETTINGS = "general_settings"; + public static final String AGGREGATION = "aggregation_issue"; + + // field in JSON representation + public static final String NAME_FIELD = "name"; + public static final String DESCRIPTION_FIELD = "description"; + public static final String TIMEFIELD_FIELD = "time_field"; + public static final String INDICES_FIELD = 
"indices"; + public static final String UI_METADATA_FIELD = "ui_metadata"; + public static final String FILTER_QUERY_FIELD = "filter_query"; + public static final String FEATURE_ATTRIBUTES_FIELD = "feature_attributes"; + public static final String WINDOW_DELAY_FIELD = "window_delay"; + public static final String SHINGLE_SIZE_FIELD = "shingle_size"; + public static final String LAST_UPDATE_TIME_FIELD = "last_update_time"; + public static final String CATEGORY_FIELD = "category_field"; + public static final String USER_FIELD = "user"; + public static final String RESULT_INDEX_FIELD = "result_index"; + public static final String IMPUTATION_OPTION_FIELD = "imputation_option"; + + private static final Imputer zeroImputer; + private static final Imputer previousImputer; + private static final Imputer linearImputer; + private static final Imputer linearImputerIntegerSensitive; + + protected String id; + protected Long version; + protected String name; + protected String description; + protected String timeField; + protected List indices; + protected List featureAttributes; + protected QueryBuilder filterQuery; + protected TimeConfiguration interval; + protected TimeConfiguration windowDelay; + protected Integer shingleSize; + protected String customResultIndex; + protected Map uiMetadata; + protected Integer schemaVersion; + protected Instant lastUpdateTime; + protected List categoryFields; + protected User user; + protected ImputationOption imputationOption; + + // validation error + protected String errorMessage; + protected ValidationIssueType issueType; + + protected Imputer imputer; + + public static String INVALID_RESULT_INDEX_NAME_SIZE = "Result index name size must contains less than " + + MAX_RESULT_INDEX_NAME_SIZE + + " characters"; + + static { + zeroImputer = new ZeroImputer(); + previousImputer = new PreviousValueImputer(); + linearImputer = new LinearUniformImputer(false); + linearImputerIntegerSensitive = new LinearUniformImputer(true); + } + + protected 
Config( + String id, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration windowDelay, + Integer shingleSize, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime, + List categoryFields, + User user, + String resultIndex, + TimeConfiguration interval, + ImputationOption imputationOption + ) { + if (Strings.isBlank(name)) { + errorMessage = CommonMessages.EMPTY_NAME; + issueType = ValidationIssueType.NAME; + return; + } + if (Strings.isBlank(timeField)) { + errorMessage = CommonMessages.NULL_TIME_FIELD; + issueType = ValidationIssueType.TIMEFIELD_FIELD; + return; + } + if (indices == null || indices.isEmpty()) { + errorMessage = CommonMessages.EMPTY_INDICES; + issueType = ValidationIssueType.INDICES; + return; + } + + if (invalidShingleSizeRange(shingleSize)) { + errorMessage = "Shingle size must be a positive integer no larger than " + + TimeSeriesSettings.MAX_SHINGLE_SIZE + + ". Got " + + shingleSize; + issueType = ValidationIssueType.SHINGLE_SIZE_FIELD; + return; + } + + errorMessage = validateCustomResultIndex(resultIndex); + if (errorMessage != null) { + issueType = ValidationIssueType.RESULT_INDEX; + return; + } + + if (imputationOption != null + && imputationOption.getMethod() == ImputationMethod.FIXED_VALUES + && imputationOption.getDefaultFill().isEmpty()) { + issueType = ValidationIssueType.IMPUTATION; + errorMessage = "No given values for fixed value interpolation"; + return; + } + + this.id = id; + this.version = version; + this.name = name; + this.description = description; + this.timeField = timeField; + this.indices = indices; + this.featureAttributes = features == null ? 
ImmutableList.of() : ImmutableList.copyOf(features); + this.filterQuery = filterQuery; + this.interval = interval; + this.windowDelay = windowDelay; + this.shingleSize = getShingleSize(shingleSize); + this.uiMetadata = uiMetadata; + this.schemaVersion = schemaVersion; + this.lastUpdateTime = lastUpdateTime; + this.categoryFields = categoryFields; + this.user = user; + this.customResultIndex = Strings.trimToNull(resultIndex); + this.imputationOption = imputationOption; + this.imputer = createImputer(); + this.issueType = null; + this.errorMessage = null; + } + + public Config(StreamInput input) throws IOException { + id = input.readOptionalString(); + version = input.readOptionalLong(); + name = input.readString(); + description = input.readOptionalString(); + timeField = input.readString(); + indices = input.readStringList(); + featureAttributes = input.readList(Feature::new); + filterQuery = input.readNamedWriteable(QueryBuilder.class); + interval = IntervalTimeConfiguration.readFrom(input); + windowDelay = IntervalTimeConfiguration.readFrom(input); + shingleSize = input.readInt(); + schemaVersion = input.readInt(); + this.categoryFields = input.readOptionalStringList(); + lastUpdateTime = input.readInstant(); + if (input.readBoolean()) { + this.user = new User(input); + } else { + user = null; + } + if (input.readBoolean()) { + this.uiMetadata = input.readMap(); + } else { + this.uiMetadata = null; + } + customResultIndex = input.readOptionalString(); + if (input.readBoolean()) { + this.imputationOption = new ImputationOption(input); + } else { + this.imputationOption = null; + } + this.imputer = createImputer(); + } + + /* + * Implicit constructor that be called implicitly when a subtype + * needs to call like AnomalyDetector(StreamInput). Otherwise, + * we will have compiler error: + * "Implicit super constructor Config() is undefined. + * Must explicitly invoke another constructor". 
+ */ + public Config() { + this.imputer = null; + } + + @Override + public void writeTo(StreamOutput output) throws IOException { + output.writeOptionalString(id); + output.writeOptionalLong(version); + output.writeString(name); + output.writeOptionalString(description); + output.writeString(timeField); + output.writeStringCollection(indices); + output.writeList(featureAttributes); + output.writeNamedWriteable(filterQuery); + interval.writeTo(output); + windowDelay.writeTo(output); + output.writeInt(shingleSize); + output.writeInt(schemaVersion); + output.writeOptionalStringCollection(categoryFields); + output.writeInstant(lastUpdateTime); + if (user != null) { + output.writeBoolean(true); // user exists + user.writeTo(output); + } else { + output.writeBoolean(false); // user does not exist + } + if (uiMetadata != null) { + output.writeBoolean(true); + output.writeMap(uiMetadata); + } else { + output.writeBoolean(false); + } + output.writeOptionalString(customResultIndex); + if (imputationOption != null) { + output.writeBoolean(true); + imputationOption.writeTo(output); + } else { + output.writeBoolean(false); + } + } + + /** + * If the given shingle size is null, return default; + * otherwise, return the given shingle size. + * + * @param customShingleSize Given shingle size + * @return Shingle size + */ + protected static Integer getShingleSize(Integer customShingleSize) { + return customShingleSize == null ? TimeSeriesSettings.DEFAULT_SHINGLE_SIZE : customShingleSize; + } + + public boolean invalidShingleSizeRange(Integer shingleSizeToTest) { + return shingleSizeToTest != null && (shingleSizeToTest < 1 || shingleSizeToTest > TimeSeriesSettings.MAX_SHINGLE_SIZE); + } + + /** + * + * @return either ValidationAspect.FORECASTER or ValidationAspect.DETECTOR + * depending on this is a forecaster or detector config. 
+ */ + protected abstract ValidationAspect getConfigValidationAspect(); + + @Generated + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + Config config = (Config) o; + // a few fields not included: + // 1)didn't include uiMetadata since toXContent/parse will produce a map of map + // and cause the parsed one not equal to the original one. This can be confusing. + // 2)didn't include id, schemaVersion, and lastUpdateTime as we deemed equality based on contents. + // Including id fails tests like AnomalyDetectorExecutionInput.testParseAnomalyDetectorExecutionInput. + return Objects.equal(name, config.name) + && Objects.equal(description, config.description) + && Objects.equal(timeField, config.timeField) + && Objects.equal(indices, config.indices) + && Objects.equal(featureAttributes, config.featureAttributes) + && Objects.equal(filterQuery, config.filterQuery) + && Objects.equal(interval, config.interval) + && Objects.equal(windowDelay, config.windowDelay) + && Objects.equal(shingleSize, config.shingleSize) + && Objects.equal(categoryFields, config.categoryFields) + && Objects.equal(user, config.user) + && Objects.equal(customResultIndex, config.customResultIndex) + && Objects.equal(imputationOption, config.imputationOption); + } + + @Generated + @Override + public int hashCode() { + return Objects + .hashCode( + name, + description, + timeField, + indices, + featureAttributes, + filterQuery, + interval, + windowDelay, + shingleSize, + categoryFields, + schemaVersion, + user, + customResultIndex, + imputationOption + ); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder + .field(NAME_FIELD, name) + .field(DESCRIPTION_FIELD, description) + .field(TIMEFIELD_FIELD, timeField) + .field(INDICES_FIELD, indices.toArray()) + .field(FILTER_QUERY_FIELD, filterQuery) + .field(WINDOW_DELAY_FIELD, windowDelay) + 
.field(SHINGLE_SIZE_FIELD, shingleSize) + .field(CommonName.SCHEMA_VERSION_FIELD, schemaVersion) + .field(FEATURE_ATTRIBUTES_FIELD, featureAttributes.toArray()); + + if (uiMetadata != null && !uiMetadata.isEmpty()) { + builder.field(UI_METADATA_FIELD, uiMetadata); + } + if (lastUpdateTime != null) { + builder.field(LAST_UPDATE_TIME_FIELD, lastUpdateTime.toEpochMilli()); + } + if (categoryFields != null) { + builder.field(CATEGORY_FIELD, categoryFields.toArray()); + } + if (user != null) { + builder.field(USER_FIELD, user); + } + if (customResultIndex != null) { + builder.field(RESULT_INDEX_FIELD, customResultIndex); + } + if (imputationOption != null) { + builder.field(IMPUTATION_OPTION_FIELD, imputationOption); + } + return builder; + } + + public Long getVersion() { + return version; + } + + public String getName() { + return name; + } + + public String getDescription() { + return description; + } + + public String getTimeField() { + return timeField; + } + + public List getIndices() { + return indices; + } + + public List getFeatureAttributes() { + return featureAttributes; + } + + public QueryBuilder getFilterQuery() { + return filterQuery; + } + + /** + * Returns enabled feature ids in the same order in feature attributes. + * + * @return a list of filtered feature ids. 
+ */ + public List getEnabledFeatureIds() { + return featureAttributes.stream().filter(Feature::getEnabled).map(Feature::getId).collect(Collectors.toList()); + } + + public List getEnabledFeatureNames() { + return featureAttributes.stream().filter(Feature::getEnabled).map(Feature::getName).collect(Collectors.toList()); + } + + public TimeConfiguration getInterval() { + return interval; + } + + public TimeConfiguration getWindowDelay() { + return windowDelay; + } + + public Integer getShingleSize() { + return shingleSize; + } + + public Map getUiMetadata() { + return uiMetadata; + } + + public Integer getSchemaVersion() { + return schemaVersion; + } + + public Instant getLastUpdateTime() { + return lastUpdateTime; + } + + public List getCategoryFields() { + return this.categoryFields; + } + + public String getId() { + return id; + } + + public long getIntervalInMilliseconds() { + return ((IntervalTimeConfiguration) getInterval()).toDuration().toMillis(); + } + + public long getIntervalInSeconds() { + return getIntervalInMilliseconds() / 1000; + } + + public long getIntervalInMinutes() { + return getIntervalInMilliseconds() / 1000 / 60; + } + + public Duration getIntervalDuration() { + return ((IntervalTimeConfiguration) getInterval()).toDuration(); + } + + public User getUser() { + return user; + } + + public void setUser(User user) { + this.user = user; + } + + public String getCustomResultIndex() { + return customResultIndex; + } + + public boolean isHighCardinality() { + return Config.isHC(getCategoryFields()); + } + + public boolean hasMultipleCategories() { + return categoryFields != null && categoryFields.size() > 1; + } + + public String validateCustomResultIndex(String resultIndex) { + if (resultIndex == null) { + return null; + } + if (resultIndex.length() > MAX_RESULT_INDEX_NAME_SIZE) { + return Config.INVALID_RESULT_INDEX_NAME_SIZE; + } + if (!resultIndex.matches(RESULT_INDEX_NAME_PATTERN)) { + return INVALID_CHAR_IN_RESULT_INDEX_NAME; + } + return null; 
+ } + + public static boolean isHC(List categoryFields) { + return categoryFields != null && categoryFields.size() > 0; + } + + public ImputationOption getImputationOption() { + return imputationOption; + } + + public Imputer getImputer() { + if (imputer != null) { + return imputer; + } + imputer = createImputer(); + return imputer; + } + + protected Imputer createImputer() { + Imputer imputer = null; + + // default interpolator is using last known value + if (imputationOption == null) { + return previousImputer; + } + + switch (imputationOption.getMethod()) { + case ZERO: + imputer = zeroImputer; + break; + case FIXED_VALUES: + // we did validate default fill is not empty in the constructor + imputer = new FixedValueImputer(imputationOption.getDefaultFill().get()); + break; + case PREVIOUS: + imputer = previousImputer; + break; + case LINEAR: + if (imputationOption.isIntegerSentive()) { + imputer = linearImputerIntegerSensitive; + } else { + imputer = linearImputer; + } + break; + default: + logger.error("unsupported method: " + imputationOption.getMethod()); + imputer = new PreviousValueImputer(); + break; + } + return imputer; + } + + protected void checkAndThrowValidationErrors(ValidationAspect validationAspect) { + if (errorMessage != null && issueType != null) { + throw new ValidationException(errorMessage, issueType, validationAspect); + } else if (errorMessage != null || issueType != null) { + throw new TimeSeriesException(CommonMessages.FAIL_TO_VALIDATE); + } + } +} diff --git a/src/main/java/org/opensearch/timeseries/model/Feature.java b/src/main/java/org/opensearch/timeseries/model/Feature.java index 9c0b20ee8..e8f58dde6 100644 --- a/src/main/java/org/opensearch/timeseries/model/Feature.java +++ b/src/main/java/org/opensearch/timeseries/model/Feature.java @@ -30,7 +30,7 @@ import com.google.common.base.Objects; /** - * Anomaly detector feature + * time series to analyze (a.k.a. 
feature) */ public class Feature implements Writeable, ToXContentObject { diff --git a/src/main/java/org/opensearch/timeseries/model/IntervalTimeConfiguration.java b/src/main/java/org/opensearch/timeseries/model/IntervalTimeConfiguration.java index 435a17ce5..78df95467 100644 --- a/src/main/java/org/opensearch/timeseries/model/IntervalTimeConfiguration.java +++ b/src/main/java/org/opensearch/timeseries/model/IntervalTimeConfiguration.java @@ -22,7 +22,6 @@ import org.opensearch.common.io.stream.StreamOutput; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.timeseries.annotation.Generated; -import org.opensearch.timeseries.constant.CommonMessages; import com.google.common.base.Objects; import com.google.common.collect.ImmutableSet; @@ -43,7 +42,13 @@ public class IntervalTimeConfiguration extends TimeConfiguration { public IntervalTimeConfiguration(long interval, ChronoUnit unit) { if (interval < 0) { throw new IllegalArgumentException( - String.format(Locale.ROOT, "Interval %s %s", interval, CommonMessages.NEGATIVE_TIME_CONFIGURATION) + String + .format( + Locale.ROOT, + "Interval %s %s", + interval, + org.opensearch.timeseries.constant.CommonMessages.NEGATIVE_TIME_CONFIGURATION + ) ); } if (!SUPPORTED_UNITS.contains(unit)) { diff --git a/src/main/java/org/opensearch/timeseries/model/ValidationAspect.java b/src/main/java/org/opensearch/timeseries/model/ValidationAspect.java index 06792e348..95fbf2217 100644 --- a/src/main/java/org/opensearch/timeseries/model/ValidationAspect.java +++ b/src/main/java/org/opensearch/timeseries/model/ValidationAspect.java @@ -14,8 +14,9 @@ import java.util.Collection; import java.util.Set; -import org.opensearch.ad.Name; import org.opensearch.ad.constant.ADCommonName; +import org.opensearch.forecast.constant.ForecastCommonName; +import org.opensearch.timeseries.Name; import org.opensearch.timeseries.constant.CommonName; /** @@ -30,7 +31,8 @@ */ public enum ValidationAspect implements Name { 
DETECTOR(ADCommonName.DETECTOR_ASPECT), - MODEL(CommonName.MODEL_ASPECT); + MODEL(CommonName.MODEL_ASPECT), + FORECASTER(ForecastCommonName.FORECASTER_ASPECT); private String name; @@ -54,6 +56,8 @@ public static ValidationAspect getName(String name) { return DETECTOR; case CommonName.MODEL_ASPECT: return MODEL; + case ForecastCommonName.FORECASTER_ASPECT: + return FORECASTER; default: throw new IllegalArgumentException("Unsupported validation aspects"); } diff --git a/src/main/java/org/opensearch/timeseries/model/ValidationIssueType.java b/src/main/java/org/opensearch/timeseries/model/ValidationIssueType.java index 3fcfb8f10..01913a9c6 100644 --- a/src/main/java/org/opensearch/timeseries/model/ValidationIssueType.java +++ b/src/main/java/org/opensearch/timeseries/model/ValidationIssueType.java @@ -11,24 +11,28 @@ package org.opensearch.timeseries.model; -import org.opensearch.ad.Name; import org.opensearch.ad.model.AnomalyDetector; +import org.opensearch.forecast.model.Forecaster; +import org.opensearch.timeseries.Name; public enum ValidationIssueType implements Name { - NAME(AnomalyDetector.NAME_FIELD), - TIMEFIELD_FIELD(AnomalyDetector.TIMEFIELD_FIELD), - SHINGLE_SIZE_FIELD(AnomalyDetector.SHINGLE_SIZE_FIELD), - INDICES(AnomalyDetector.INDICES_FIELD), - FEATURE_ATTRIBUTES(AnomalyDetector.FEATURE_ATTRIBUTES_FIELD), + NAME(Config.NAME_FIELD), + TIMEFIELD_FIELD(Config.TIMEFIELD_FIELD), + SHINGLE_SIZE_FIELD(Config.SHINGLE_SIZE_FIELD), + INDICES(Config.INDICES_FIELD), + FEATURE_ATTRIBUTES(Config.FEATURE_ATTRIBUTES_FIELD), + CATEGORY(Config.CATEGORY_FIELD), + FILTER_QUERY(Config.FILTER_QUERY_FIELD), + WINDOW_DELAY(Config.WINDOW_DELAY_FIELD), + GENERAL_SETTINGS(Config.GENERAL_SETTINGS), + RESULT_INDEX(Config.RESULT_INDEX_FIELD), + TIMEOUT(Config.TIMEOUT), + AGGREGATION(Config.AGGREGATION), // this is a unique case where aggregation failed due to an issue in core but + // don't want to throw exception + IMPUTATION(Config.IMPUTATION_OPTION_FIELD), 
DETECTION_INTERVAL(AnomalyDetector.DETECTION_INTERVAL_FIELD), - CATEGORY(AnomalyDetector.CATEGORY_FIELD), - FILTER_QUERY(AnomalyDetector.FILTER_QUERY_FIELD), - WINDOW_DELAY(AnomalyDetector.WINDOW_DELAY_FIELD), - GENERAL_SETTINGS(AnomalyDetector.GENERAL_SETTINGS), - RESULT_INDEX(AnomalyDetector.RESULT_INDEX_FIELD), - TIMEOUT(AnomalyDetector.TIMEOUT), - AGGREGATION(AnomalyDetector.AGGREGATION); // this is a unique case where aggregation failed due to an issue in core but - // don't want to throw exception + FORECAST_INTERVAL(Forecaster.FORECAST_INTERVAL_FIELD), + HORIZON_SIZE(Forecaster.HORIZON_FIELD); private String name; diff --git a/src/main/java/org/opensearch/timeseries/util/ParseUtils.java b/src/main/java/org/opensearch/timeseries/util/ParseUtils.java index 232c0241b..b83753040 100644 --- a/src/main/java/org/opensearch/timeseries/util/ParseUtils.java +++ b/src/main/java/org/opensearch/timeseries/util/ParseUtils.java @@ -16,8 +16,6 @@ import static org.opensearch.ad.constant.ADCommonName.DATE_HISTOGRAM; import static org.opensearch.ad.constant.ADCommonName.EPOCH_MILLIS_FORMAT; import static org.opensearch.ad.constant.ADCommonName.FEATURE_AGGS; -import static org.opensearch.ad.model.AnomalyDetector.QUERY_PARAM_PERIOD_END; -import static org.opensearch.ad.model.AnomalyDetector.QUERY_PARAM_PERIOD_START; import static org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.search.aggregations.AggregationBuilders.dateRange; import static org.opensearch.search.aggregations.AggregatorFactories.VALID_AGG_NAME; @@ -363,29 +361,6 @@ public static SearchSourceBuilder generatePreviewQuery( return new SearchSourceBuilder().query(detector.getFilterQuery()).size(0).aggregation(dateRangeBuilder); } - public static String generateInternalFeatureQueryTemplate(AnomalyDetector detector, NamedXContentRegistry xContentRegistry) - throws IOException { - RangeQueryBuilder rangeQuery = new RangeQueryBuilder(detector.getTimeField()) - 
.from("{{" + QUERY_PARAM_PERIOD_START + "}}") - .to("{{" + QUERY_PARAM_PERIOD_END + "}}"); - - BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(detector.getFilterQuery()); - - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); - if (detector.getFeatureAttributes() != null) { - for (Feature feature : detector.getFeatureAttributes()) { - AggregatorFactories.Builder internalAgg = parseAggregators( - feature.getAggregation().toString(), - xContentRegistry, - feature.getId() - ); - internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); - } - } - - return internalSearchSourceBuilder.toString(); - } - public static SearchSourceBuilder generateEntityColdStartQuery( AnomalyDetector detector, List> ranges, @@ -667,14 +642,14 @@ public static SearchSourceBuilder batchFeatureQuery( BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(detector.getFilterQuery()); - if (detector.isMultientityDetector() && entity != null && entity.getAttributes().size() > 0) { + if (detector.isHighCardinality() && entity != null && entity.getAttributes().size() > 0) { entity .getAttributes() .entrySet() .forEach(attr -> { internalFilterQuery.filter(new TermQueryBuilder(attr.getKey(), attr.getValue())); }); } - long intervalSeconds = ((IntervalTimeConfiguration) detector.getDetectionInterval()).toDuration().getSeconds(); + long intervalSeconds = ((IntervalTimeConfiguration) detector.getInterval()).toDuration().getSeconds(); List> sources = new ArrayList<>(); sources diff --git a/src/test/java/org/opensearch/StreamInputOutputTests.java b/src/test/java/org/opensearch/StreamInputOutputTests.java index 3b9ecdc57..21bd03e2c 100644 --- a/src/test/java/org/opensearch/StreamInputOutputTests.java +++ b/src/test/java/org/opensearch/StreamInputOutputTests.java @@ -26,7 +26,6 @@ import java.util.Set; import 
org.opensearch.action.FailedNodeException; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.model.Entity; import org.opensearch.ad.model.EntityProfileName; import org.opensearch.ad.model.ModelProfile; @@ -43,12 +42,13 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.transport.TransportAddress; +import org.opensearch.timeseries.AbstractTimeSeriesTest; /** * Put in core package so that we can using Version's package private constructor * */ -public class StreamInputOutputTests extends AbstractADTest { +public class StreamInputOutputTests extends AbstractTimeSeriesTest { // public static Version V_1_1_0 = new Version(1010099, org.apache.lucene.util.Version.LUCENE_8_8_2); private EntityResultRequest entityResultRequest; private String detectorId; @@ -111,7 +111,7 @@ public void testDeSerializeEntityResultRequest() throws IOException { StreamInput streamInput = output.bytes().streamInput(); EntityResultRequest readRequest = new EntityResultRequest(streamInput); - assertThat(readRequest.getDetectorId(), equalTo(detectorId)); + assertThat(readRequest.getId(), equalTo(detectorId)); assertThat(readRequest.getStart(), equalTo(start)); assertThat(readRequest.getEnd(), equalTo(end)); assertTrue(areEqualWithArrayValue(readRequest.getEntities(), entities)); diff --git a/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java b/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java index 311a38a10..e92f56766 100644 --- a/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java +++ b/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java @@ -44,9 +44,7 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import 
org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.model.AnomalyDetector; @@ -65,6 +63,8 @@ import org.opensearch.rest.RestRequest; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.TransportService; @@ -76,7 +76,7 @@ * package private * */ -public class IndexAnomalyDetectorActionHandlerTests extends AbstractADTest { +public class IndexAnomalyDetectorActionHandlerTests extends AbstractTimeSeriesTest { static ThreadPool threadPool; private ThreadContext threadContext; private String TEXT_FIELD_TYPE = "text"; @@ -423,7 +423,7 @@ private void testUpdateTemplate(String fieldTypeName) throws IOException { int totalHits = 9; when(detectorResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); - GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX); + GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX); SearchResponse userIndexResponse = mock(SearchResponse.class); int userIndexHits = 0; @@ -620,7 +620,7 @@ public void testTenMultiEntityDetectorsUpdateSingleEntityAdToMulti() throws IOEx int totalHits = 10; AnomalyDetector existingDetector = TestHelpers.randomAnomalyDetectorUsingCategoryFields(detectorId, null); GetResponse getDetectorResponse = TestHelpers - .createGetResponse(existingDetector, existingDetector.getDetectorId(), CommonName.CONFIG_INDEX); + 
.createGetResponse(existingDetector, existingDetector.getId(), CommonName.CONFIG_INDEX); SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); @@ -703,7 +703,7 @@ public void testTenMultiEntityDetectorsUpdateSingleEntityAdToMulti() throws IOEx public void testTenMultiEntityDetectorsUpdateExistingMultiEntityAd() throws IOException { int totalHits = 10; AnomalyDetector detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields(detectorId, Arrays.asList("a")); - GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX); + GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX); SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.getHits()).thenReturn(TestHelpers.createSearchHits(totalHits)); diff --git a/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java b/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java index 44bbcc89d..f79f38d3c 100644 --- a/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java +++ b/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java @@ -32,9 +32,7 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.model.AnomalyDetector; @@ -52,13 +50,15 @@ import org.opensearch.common.util.concurrent.ThreadContext; import 
org.opensearch.rest.RestRequest; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.model.ValidationAspect; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableList; -public class ValidateAnomalyDetectorActionHandlerTests extends AbstractADTest { +public class ValidateAnomalyDetectorActionHandlerTests extends AbstractTimeSeriesTest { protected AbstractAnomalyDetectorActionHandler handler; protected ClusterService clusterService; diff --git a/src/test/java/org/opensearch/ad/ADIntegTestCase.java b/src/test/java/org/opensearch/ad/ADIntegTestCase.java index b51f2e8c1..cb23ca114 100644 --- a/src/test/java/org/opensearch/ad/ADIntegTestCase.java +++ b/src/test/java/org/opensearch/ad/ADIntegTestCase.java @@ -11,7 +11,6 @@ package org.opensearch.ad; -import static org.opensearch.ad.AbstractADTest.LOG; import static org.opensearch.ad.util.RestHandlerUtils.XCONTENT_WITH_TYPE; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -24,6 +23,8 @@ import java.util.List; import java.util.Map; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.Logger; import org.junit.Before; import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; @@ -60,6 +61,7 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.transport.MockTransportService; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.Feature; @@ -67,6 +69,7 @@ import 
com.google.common.collect.ImmutableMap; public abstract class ADIntegTestCase extends OpenSearchIntegTestCase { + protected static final Logger LOG = (Logger) LogManager.getLogger(ADIntegTestCase.class); private long timeout = 5_000; protected String timeField = "timestamp"; diff --git a/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java b/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java index a47ec9968..cd63cda8c 100644 --- a/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java @@ -44,9 +44,11 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.transport.TransportAddress; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; -public class AbstractProfileRunnerTests extends AbstractADTest { +public class AbstractProfileRunnerTests extends AbstractTimeSeriesTest { protected enum DetectorStatus { INDEX_NOT_EXIST, NO_DOC, diff --git a/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java b/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java index 636943894..efd61d7e8 100644 --- a/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java +++ b/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java @@ -86,6 +86,8 @@ import org.opensearch.jobscheduler.spi.schedule.Schedule; import org.opensearch.jobscheduler.spi.utils.LockService; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; @@ -93,7 +95,7 @@ import com.google.common.collect.ImmutableList; 
-public class AnomalyDetectorJobRunnerTests extends AbstractADTest { +public class AnomalyDetectorJobRunnerTests extends AbstractTimeSeriesTest { @Mock private Client client; @@ -400,7 +402,7 @@ private void testRunAdJobWithEndRunExceptionNowAndStopAdJob(boolean jobExists, b Instant.now(), 60L, TestHelpers.randomUser(), - jobParameter.getResultIndex() + jobParameter.getCustomResultIndex() ).toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS) ), Collections.emptyMap(), @@ -731,7 +733,7 @@ public void testMarkResultIndexQueried() throws IOException { // init real time task cache for the detector. We will do this during AnomalyResultTransportAction. // Since we mocked the execution by returning anomaly result directly, we need to init it explicitly. - adTaskCacheManager.initRealtimeTaskCache(detector.getDetectorId(), 0); + adTaskCacheManager.initRealtimeTaskCache(detector.getId(), 0); // recreate recorder since we need to use the unmocked adTaskCacheManager recorder = new ExecuteADResultResponseRecorder( @@ -746,7 +748,7 @@ public void testMarkResultIndexQueried() throws IOException { 32 ); - assertEquals(false, adTaskCacheManager.hasQueriedResultIndex(detector.getDetectorId())); + assertEquals(false, adTaskCacheManager.hasQueriedResultIndex(detector.getId())); LockModel lock = new LockModel(CommonName.JOB_INDEX, jobParameter.getName(), Instant.now(), 10, false); @@ -761,6 +763,6 @@ public void testMarkResultIndexQueried() throws IOException { verify(adTaskManager, times(1)) .updateLatestRealtimeTaskOnCoordinatingNode(any(), any(), totalUpdates.capture(), any(), any(), any()); assertEquals(NUM_MIN_SAMPLES, totalUpdates.getValue().longValue()); - assertEquals(true, adTaskCacheManager.hasQueriedResultIndex(detector.getDetectorId())); + assertEquals(true, adTaskCacheManager.hasQueriedResultIndex(detector.getId())); } } diff --git a/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java 
b/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java index 479e9cdc8..f786be6b8 100644 --- a/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java @@ -60,6 +60,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonMessages; @@ -122,7 +123,7 @@ private void setUpClientGet( if (request.index().equals(CommonName.CONFIG_INDEX)) { switch (detectorStatus) { case EXIST: - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); break; case INDEX_NOT_EXIST: listener.onFailure(new IndexNotFoundException(CommonName.CONFIG_INDEX)); @@ -143,11 +144,11 @@ private void setUpClientGet( break; case DISABLED: job = TestHelpers.randomAnomalyDetectorJob(false, jobEnabledTime, null); - listener.onResponse(TestHelpers.createGetResponse(job, detector.getDetectorId(), CommonName.JOB_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(job, detector.getId(), CommonName.JOB_INDEX)); break; case ENABLED: job = TestHelpers.randomAnomalyDetectorJob(true, jobEnabledTime, null); - listener.onResponse(TestHelpers.createGetResponse(job, detector.getDetectorId(), CommonName.JOB_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(job, detector.getId(), CommonName.JOB_INDEX)); break; default: assertTrue("should not reach here", false); @@ -164,10 +165,7 @@ private void setUpClientGet( if (error != null) { result.error(error); } - listener - .onResponse( - 
TestHelpers.createGetResponse(result.build(), detector.getDetectorId(), ADCommonName.DETECTION_STATE_INDEX) - ); + listener.onResponse(TestHelpers.createGetResponse(result.build(), detector.getId(), ADCommonName.DETECTION_STATE_INDEX)); } @@ -211,7 +209,7 @@ public void testDisabledJobIndexTemplate(JobStatus status) throws IOException, I DetectorProfile expectedProfile = new DetectorProfile.Builder().state(DetectorState.DISABLED).build(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { @@ -235,7 +233,7 @@ public void testInitOrRunningStateTemplate(RCFPollingStatus status, DetectorStat DetectorProfile expectedProfile = new DetectorProfile.Builder().state(expectedState).build(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { @@ -305,7 +303,7 @@ public void testErrorStateTemplate( DetectorProfile expectedProfile = builder.build(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { @@ -516,7 +514,7 @@ public void testProfileModels() throws InterruptedException, IOException { final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(profileResponse -> { + runner.profile(detector.getId(), ActionListener.wrap(profileResponse -> { assertEquals(node1, 
profileResponse.getCoordinatingNode()); assertEquals(shingleSize, profileResponse.getShingleSize()); assertEquals(modelSize * 2, profileResponse.getTotalSizeInBytes()); @@ -548,7 +546,7 @@ public void testInitProgress() throws IOException, InterruptedException { expectedProfile.setInitProgress(profile); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { @@ -567,7 +565,7 @@ public void testInitProgressFailImmediately() throws IOException, InterruptedExc expectedProfile.setInitProgress(profile); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertTrue("Should not reach here ", false); inProgressLatch.countDown(); }, exception -> { @@ -585,7 +583,7 @@ public void testInitNoUpdateNoIndex() throws IOException, InterruptedException { .build(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { @@ -607,7 +605,7 @@ public void testInitNoIndex() throws IOException, InterruptedException { .build(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertEquals(expectedProfile, response); inProgressLatch.countDown(); }, exception -> { @@ -632,7 +630,7 @@ public void testFailRCFPolling() throws IOException, InterruptedException { setUpClientGet(DetectorStatus.EXIST, 
JobStatus.ENABLED, RCFPollingStatus.EXCEPTION, ErrorResultStatus.NO_ERROR); final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertTrue("Should not reach here ", false); inProgressLatch.countDown(); }, exception -> { diff --git a/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java b/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java index d9eb33955..89035eff8 100644 --- a/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java +++ b/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java @@ -44,6 +44,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.RestStatus; import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; import com.google.common.collect.ImmutableList; @@ -123,9 +124,9 @@ protected AnomalyDetector createRandomAnomalyDetector( AnomalyDetector createdDetector = createAnomalyDetector(detector, refresh, client); if (withMetadata) { - return getAnomalyDetector(createdDetector.getDetectorId(), new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"), client); + return getAnomalyDetector(createdDetector.getId(), new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"), client); } - return getAnomalyDetector(createdDetector.getDetectorId(), new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), client); + return getAnomalyDetector(createdDetector.getId(), new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), client); } protected AnomalyDetector createAnomalyDetector(AnomalyDetector detector, Boolean refresh, RestClient client) throws IOException { @@ -302,7 +303,7 @@ public ToXContentObject[] getAnomalyDetector( detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + 
detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), @@ -310,7 +311,8 @@ public ToXContentObject[] getAnomalyDetector( detector.getLastUpdateTime(), null, detector.getUser(), - detector.getResultIndex() + detector.getCustomResultIndex(), + detector.getImputationOption() ), detectorJob, historicalAdTask, @@ -634,15 +636,16 @@ protected AnomalyDetector cloneDetector(AnomalyDetector anomalyDetector, String anomalyDetector.getIndices(), anomalyDetector.getFeatureAttributes(), anomalyDetector.getFilterQuery(), - anomalyDetector.getDetectionInterval(), + anomalyDetector.getInterval(), anomalyDetector.getWindowDelay(), anomalyDetector.getShingleSize(), anomalyDetector.getUiMetadata(), anomalyDetector.getSchemaVersion(), Instant.now(), - anomalyDetector.getCategoryField(), + anomalyDetector.getCategoryFields(), null, - resultIndex + resultIndex, + anomalyDetector.getImputationOption() ); return detector; } diff --git a/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java b/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java index 7ade2c597..f40240267 100644 --- a/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java @@ -57,10 +57,12 @@ import org.opensearch.search.aggregations.InternalAggregations; import org.opensearch.search.aggregations.metrics.InternalMax; import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; -public class EntityProfileRunnerTests extends AbstractADTest { +public class EntityProfileRunnerTests extends AbstractTimeSeriesTest { private AnomalyDetector detector; private int detectorIntervalMin; private Client client; @@ -144,9 +146,9 @@ public void setUp() throws Exception { String 
indexName = request.index(); if (indexName.equals(CommonName.CONFIG_INDEX)) { - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); } else if (indexName.equals(CommonName.JOB_INDEX)) { - listener.onResponse(TestHelpers.createGetResponse(job, detector.getDetectorId(), CommonName.JOB_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(job, detector.getId(), CommonName.JOB_INDEX)); } return null; @@ -352,7 +354,7 @@ public void testJobIndexNotFound() throws InterruptedException { String indexName = request.index(); if (indexName.equals(CommonName.CONFIG_INDEX)) { - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); } else if (indexName.equals(CommonName.JOB_INDEX)) { listener.onFailure(new IndexNotFoundException(CommonName.JOB_INDEX)); } @@ -384,7 +386,7 @@ public void testNotMultiEntityDetector() throws IOException, InterruptedExceptio String indexName = request.index(); if (indexName.equals(CommonName.CONFIG_INDEX)) { - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); } return null; @@ -410,11 +412,7 @@ public void testInitNInfo() throws InterruptedException { // 1 / 128 rounded to 1% int neededSamples = requiredSamples - smallUpdates; - InitProgressProfile profile = new InitProgressProfile( - "1%", - neededSamples * detector.getDetectorIntervalInSeconds() / 60, - neededSamples - ); + InitProgressProfile profile = new InitProgressProfile("1%", neededSamples * detector.getIntervalInSeconds() / 60, neededSamples); expectedProfile.initProgress(profile); 
expectedProfile.isActive(isActive); expectedProfile.lastActiveTimestampMs(latestActiveTimestamp); diff --git a/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java b/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java index 135635250..1c0113f59 100644 --- a/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java +++ b/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java @@ -53,6 +53,7 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.transport.MockTransportService; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Feature; @@ -129,7 +130,7 @@ public AnomalyDetector randomDetector(List features) throws IOException } public ADTask randomCreatedADTask(String taskId, AnomalyDetector detector, DateRange detectionDateRange) { - String detectorId = detector == null ? null : detector.getDetectorId(); + String detectorId = detector == null ? 
null : detector.getId(); return randomCreatedADTask(taskId, detector, detectorId, detectionDateRange); } diff --git a/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java b/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java index c37fbb646..464f6868c 100644 --- a/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java +++ b/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java @@ -36,6 +36,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.rest.RestStatus; import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Feature; diff --git a/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java b/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java index 825e6c069..3d8eb7617 100644 --- a/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java @@ -63,10 +63,12 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.TransportService; -public class MultiEntityProfileRunnerTests extends AbstractADTest { +public class MultiEntityProfileRunnerTests extends AbstractTimeSeriesTest { private AnomalyDetectorProfileRunner runner; private Client client; private SecurityClientUtil clientUtil; @@ -150,14 +152,11 @@ public void setUp() throws Exception { String indexName = request.index(); if (indexName.equals(CommonName.CONFIG_INDEX)) { - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + 
listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); } else if (indexName.equals(ADCommonName.DETECTION_STATE_INDEX)) { - listener - .onResponse( - TestHelpers.createGetResponse(result.build(), detector.getDetectorId(), ADCommonName.DETECTION_STATE_INDEX) - ); + listener.onResponse(TestHelpers.createGetResponse(result.build(), detector.getId(), ADCommonName.DETECTION_STATE_INDEX)); } else if (indexName.equals(CommonName.JOB_INDEX)) { - listener.onResponse(TestHelpers.createGetResponse(job, detector.getDetectorId(), CommonName.JOB_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(job, detector.getId(), CommonName.JOB_INDEX)); } return null; diff --git a/src/test/java/org/opensearch/ad/NodeStateManagerTests.java b/src/test/java/org/opensearch/ad/NodeStateManagerTests.java index 8aba72aa0..9cad7d5eb 100644 --- a/src/test/java/org/opensearch/ad/NodeStateManagerTests.java +++ b/src/test/java/org/opensearch/ad/NodeStateManagerTests.java @@ -62,11 +62,13 @@ import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import com.google.common.collect.ImmutableMap; -public class NodeStateManagerTests extends AbstractADTest { +public class NodeStateManagerTests extends AbstractTimeSeriesTest { private NodeStateManager stateManager; private Client client; private ClientUtil clientUtil; @@ -167,11 +169,11 @@ private String setupDetector() throws IOException { } assertTrue(request != null && listener != null); - listener.onResponse(TestHelpers.createGetResponse(detectorToCheck, detectorToCheck.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detectorToCheck, detectorToCheck.getId(), CommonName.CONFIG_INDEX)); return null; 
}).when(client).get(any(), any(ActionListener.class)); - return detectorToCheck.getDetectorId(); + return detectorToCheck.getId(); } @SuppressWarnings("unchecked") @@ -247,12 +249,13 @@ public void testHasRunningQuery() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of(), null); SearchRequest dummySearchRequest = new SearchRequest(); assertFalse(stateManager.hasRunningQuery(detector)); - throttler.insertFilteredQuery(detector.getDetectorId(), dummySearchRequest); + throttler.insertFilteredQuery(detector.getId(), dummySearchRequest); assertTrue(stateManager.hasRunningQuery(detector)); } public void testGetAnomalyDetector() throws IOException, InterruptedException { String detectorId = setupDetector(); + final CountDownLatch inProgressLatch = new CountDownLatch(1); stateManager.getAnomalyDetector(detectorId, ActionListener.wrap(asDetector -> { assertEquals(detectorToCheck, asDetector.get()); diff --git a/src/test/java/org/opensearch/ad/NodeStateTests.java b/src/test/java/org/opensearch/ad/NodeStateTests.java index 17bffbd6b..c48afdb76 100644 --- a/src/test/java/org/opensearch/ad/NodeStateTests.java +++ b/src/test/java/org/opensearch/ad/NodeStateTests.java @@ -20,6 +20,7 @@ import java.time.Instant; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; public class NodeStateTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java b/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java index 924709539..cd58af595 100644 --- a/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java +++ b/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java @@ -39,7 +39,6 @@ import org.apache.hc.core5.http.HttpEntity; import org.junit.Assert; import org.junit.Before; -import org.opensearch.ad.TestHelpers; import 
org.opensearch.ad.mock.model.MockSimpleLog; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; @@ -52,6 +51,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.rest.RestStatus; import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/caching/AbstractCacheTest.java b/src/test/java/org/opensearch/ad/caching/AbstractCacheTest.java index 717d62379..da816a0c0 100644 --- a/src/test/java/org/opensearch/ad/caching/AbstractCacheTest.java +++ b/src/test/java/org/opensearch/ad/caching/AbstractCacheTest.java @@ -22,7 +22,6 @@ import java.util.Random; import org.junit.Before; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.MemoryTracker; import org.opensearch.ad.ml.EntityModel; import org.opensearch.ad.ml.ModelManager.ModelType; @@ -32,8 +31,9 @@ import org.opensearch.ad.ratelimit.CheckpointMaintainWorker; import org.opensearch.ad.ratelimit.CheckpointWriteWorker; import org.opensearch.ad.settings.AnomalyDetectorSettings; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class AbstractCacheTest extends AbstractADTest { +public class AbstractCacheTest extends AbstractTimeSeriesTest { protected String modelId1, modelId2, modelId3, modelId4; protected Entity entity1, entity2, entity3, entity4; protected ModelState modelState1, modelState2, modelState3, modelState4; @@ -56,10 +56,10 @@ public void setUp() throws Exception { super.setUp(); detector = mock(AnomalyDetector.class); detectorId = "123"; - when(detector.getDetectorId()).thenReturn(detectorId); + when(detector.getId()).thenReturn(detectorId); detectorDuration = Duration.ofMinutes(5); - when(detector.getDetectionIntervalDuration()).thenReturn(detectorDuration); - when(detector.getDetectorIntervalInSeconds()).thenReturn(detectorDuration.getSeconds()); + 
when(detector.getIntervalDuration()).thenReturn(detectorDuration); + when(detector.getIntervalInSeconds()).thenReturn(detectorDuration.getSeconds()); when(detector.getEnabledFeatureIds()).thenReturn(new ArrayList() { { add("a"); diff --git a/src/test/java/org/opensearch/ad/caching/PriorityCacheTests.java b/src/test/java/org/opensearch/ad/caching/PriorityCacheTests.java index 775bfc01c..d73aa37fb 100644 --- a/src/test/java/org/opensearch/ad/caching/PriorityCacheTests.java +++ b/src/test/java/org/opensearch/ad/caching/PriorityCacheTests.java @@ -141,9 +141,9 @@ public void setUp() throws Exception { detector2 = mock(AnomalyDetector.class); detectorId2 = "456"; - when(detector2.getDetectorId()).thenReturn(detectorId2); - when(detector2.getDetectionIntervalDuration()).thenReturn(detectorDuration); - when(detector2.getDetectorIntervalInSeconds()).thenReturn(detectorDuration.getSeconds()); + when(detector2.getId()).thenReturn(detectorId2); + when(detector2.getIntervalDuration()).thenReturn(detectorDuration); + when(detector2.getIntervalInSeconds()).thenReturn(detectorDuration.getSeconds()); point = new double[] { 0.1 }; } @@ -199,7 +199,7 @@ public void testCacheHit() { assertEquals(1, entityCache.getTotalActiveEntities()); assertEquals(1, entityCache.getAllModels().size()); ModelState hitState = entityCache.get(modelState1.getModelId(), detector); - assertEquals(detectorId, hitState.getDetectorId()); + assertEquals(detectorId, hitState.getId()); EntityModel model = hitState.getModel(); assertEquals(false, model.getTrcf().isPresent()); assertTrue(model.getSamples().isEmpty()); @@ -654,7 +654,7 @@ public void testLongDetectorInterval() { try { ADEnabledSetting.getInstance().setSettingValue(ADEnabledSetting.DOOR_KEEPER_IN_CACHE_ENABLED, true); when(clock.instant()).thenReturn(Instant.ofEpochSecond(1000)); - when(detector.getDetectionIntervalDuration()).thenReturn(Duration.ofHours(12)); + when(detector.getIntervalDuration()).thenReturn(Duration.ofHours(12)); String modelId 
= entity1.getModelId(detectorId).get(); // record last access time 1000 assertTrue(null == entityCache.get(modelId, detector)); diff --git a/src/test/java/org/opensearch/ad/cluster/ADClusterEventListenerTests.java b/src/test/java/org/opensearch/ad/cluster/ADClusterEventListenerTests.java index 653c19a2b..415ec75fe 100644 --- a/src/test/java/org/opensearch/ad/cluster/ADClusterEventListenerTests.java +++ b/src/test/java/org/opensearch/ad/cluster/ADClusterEventListenerTests.java @@ -28,7 +28,6 @@ import org.junit.BeforeClass; import org.opensearch.Version; import org.opensearch.action.ActionListener; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.cluster.ClusterChangedEvent; import org.opensearch.cluster.ClusterName; @@ -38,8 +37,9 @@ import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.cluster.service.ClusterService; import org.opensearch.gateway.GatewayService; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class ADClusterEventListenerTests extends AbstractADTest { +public class ADClusterEventListenerTests extends AbstractTimeSeriesTest { private final String clusterManagerNodeId = "clusterManagerNode"; private final String dataNode1Id = "dataNode1"; private final String clusterName = "multi-node-cluster"; diff --git a/src/test/java/org/opensearch/ad/cluster/ADDataMigratorTests.java b/src/test/java/org/opensearch/ad/cluster/ADDataMigratorTests.java index a6623e9d7..64a786b32 100644 --- a/src/test/java/org/opensearch/ad/cluster/ADDataMigratorTests.java +++ b/src/test/java/org/opensearch/ad/cluster/ADDataMigratorTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.ad.ADUnitTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.client.Client; import 
org.opensearch.cluster.node.DiscoveryNode; @@ -47,6 +46,7 @@ import org.opensearch.search.SearchHits; import org.opensearch.search.aggregations.InternalAggregations; import org.opensearch.search.internal.InternalSearchResponse; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; public class ADDataMigratorTests extends ADUnitTestCase { diff --git a/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java b/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java index c7ec778a5..6dcda085a 100644 --- a/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java +++ b/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java @@ -27,7 +27,6 @@ import java.util.Locale; import org.junit.Before; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.cluster.diskcleanup.ModelCheckpointIndexRetention; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.settings.AnomalyDetectorSettings; @@ -41,8 +40,9 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.threadpool.Scheduler.Cancellable; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class ClusterManagerEventListenerTests extends AbstractADTest { +public class ClusterManagerEventListenerTests extends AbstractTimeSeriesTest { private ClusterService clusterService; private ThreadPool threadPool; private Client client; diff --git a/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java b/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java index 5bbb4cb7f..63d48ef3c 100644 --- a/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java +++ b/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java @@ -24,13 +24,13 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; -import org.opensearch.ad.AbstractADTest; import 
org.opensearch.ad.util.ClientUtil; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryAction; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class DailyCronTests extends AbstractADTest { +public class DailyCronTests extends AbstractTimeSeriesTest { enum DailyCronTestExecutionMode { NORMAL, diff --git a/src/test/java/org/opensearch/ad/cluster/HourlyCronTests.java b/src/test/java/org/opensearch/ad/cluster/HourlyCronTests.java index e5fa6cd93..7e5f6c83e 100644 --- a/src/test/java/org/opensearch/ad/cluster/HourlyCronTests.java +++ b/src/test/java/org/opensearch/ad/cluster/HourlyCronTests.java @@ -28,7 +28,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.action.FailedNodeException; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.transport.CronAction; import org.opensearch.ad.transport.CronNodeResponse; @@ -40,10 +39,11 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import test.org.opensearch.ad.util.ClusterCreation; -public class HourlyCronTests extends AbstractADTest { +public class HourlyCronTests extends AbstractTimeSeriesTest { enum HourlyCronTestExecutionMode { NORMAL, diff --git a/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java b/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java index 8e96e21c3..1425a5ec3 100644 --- a/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java +++ b/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java @@ -29,7 +29,6 @@ import org.opensearch.action.admin.indices.stats.CommonStats; import 
org.opensearch.action.admin.indices.stats.IndicesStatsResponse; import org.opensearch.action.admin.indices.stats.ShardStats; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.client.IndicesAdminClient; @@ -38,8 +37,9 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.store.StoreStats; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class IndexCleanupTests extends AbstractADTest { +public class IndexCleanupTests extends AbstractTimeSeriesTest { @Mock(answer = Answers.RETURNS_DEEP_STUBS) Client client; diff --git a/src/test/java/org/opensearch/ad/cluster/diskcleanup/ModelCheckpointIndexRetentionTests.java b/src/test/java/org/opensearch/ad/cluster/diskcleanup/ModelCheckpointIndexRetentionTests.java index ffdb1a042..0222a4d47 100644 --- a/src/test/java/org/opensearch/ad/cluster/diskcleanup/ModelCheckpointIndexRetentionTests.java +++ b/src/test/java/org/opensearch/ad/cluster/diskcleanup/ModelCheckpointIndexRetentionTests.java @@ -27,10 +27,10 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.opensearch.action.ActionListener; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.constant.ADCommonName; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class ModelCheckpointIndexRetentionTests extends AbstractADTest { +public class ModelCheckpointIndexRetentionTests extends AbstractTimeSeriesTest { Duration defaultCheckpointTtl = Duration.ofDays(3); diff --git a/src/test/java/org/opensearch/ad/common/exception/LimitExceededExceptionTests.java b/src/test/java/org/opensearch/ad/common/exception/LimitExceededExceptionTests.java index c2d40718a..37b3770ff 100644 --- a/src/test/java/org/opensearch/ad/common/exception/LimitExceededExceptionTests.java +++ 
b/src/test/java/org/opensearch/ad/common/exception/LimitExceededExceptionTests.java @@ -23,7 +23,7 @@ public void testConstructorWithIdAndExplanation() { String id = "test id"; String message = "test message"; LimitExceededException limitExceeded = new LimitExceededException(id, message); - assertEquals(id, limitExceeded.getAnomalyDetectorId()); + assertEquals(id, limitExceeded.getConfigId()); assertEquals(message, limitExceeded.getMessage()); } } diff --git a/src/test/java/org/opensearch/ad/common/exception/NotSerializedADExceptionNameTests.java b/src/test/java/org/opensearch/ad/common/exception/NotSerializedADExceptionNameTests.java index 2ac0b0312..b76e94c64 100644 --- a/src/test/java/org/opensearch/ad/common/exception/NotSerializedADExceptionNameTests.java +++ b/src/test/java/org/opensearch/ad/common/exception/NotSerializedADExceptionNameTests.java @@ -26,49 +26,49 @@ public class NotSerializedADExceptionNameTests extends OpenSearchTestCase { public void testConvertAnomalyDetectionException() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new TimeSeriesException("", "")), ""); + .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new TimeSeriesException("", "")), ""); assertTrue(converted.isPresent()); assertTrue(converted.get() instanceof TimeSeriesException); } public void testConvertInternalFailure() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new InternalFailure("", "")), ""); + .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new InternalFailure("", "")), ""); assertTrue(converted.isPresent()); assertTrue(converted.get() instanceof InternalFailure); } public void testConvertClientException() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new ClientException("", "")), ""); 
+ .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new ClientException("", "")), ""); assertTrue(converted.isPresent()); assertTrue(converted.get() instanceof ClientException); } public void testConvertADTaskCancelledException() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new TaskCancelledException("", "")), ""); + .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new TaskCancelledException("", "")), ""); assertTrue(converted.isPresent()); assertTrue(converted.get() instanceof TaskCancelledException); } public void testConvertDuplicateTaskException() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new DuplicateTaskException("")), ""); + .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new DuplicateTaskException("")), ""); assertTrue(converted.isPresent()); assertTrue(converted.get() instanceof DuplicateTaskException); } public void testConvertADValidationException() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new ValidationException("", null, null)), ""); + .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new ValidationException("", null, null)), ""); assertTrue(converted.isPresent()); assertTrue(converted.get() instanceof ValidationException); } public void testUnknownException() { Optional converted = NotSerializedExceptionName - .convertWrappedAnomalyDetectionException(new NotSerializableExceptionWrapper(new RuntimeException("")), ""); + .convertWrappedTimeSeriesException(new NotSerializableExceptionWrapper(new RuntimeException("")), ""); assertTrue(!converted.isPresent()); } } diff --git a/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java b/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java index 
02bf8dbd4..919b3e068 100644 --- a/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java +++ b/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java @@ -11,9 +11,9 @@ package org.opensearch.ad.e2e; -import static org.opensearch.ad.TestHelpers.toHttpEntity; import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; +import static org.opensearch.timeseries.TestHelpers.toHttpEntity; import java.io.File; import java.io.FileReader; @@ -29,7 +29,6 @@ import org.apache.hc.core5.http.HttpHeaders; import org.apache.hc.core5.http.message.BasicHeader; import org.opensearch.ad.ODFERestTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.client.Request; import org.opensearch.client.RequestOptions; import org.opensearch.client.Response; @@ -38,6 +37,7 @@ import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.common.Strings; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.gson.JsonArray; diff --git a/src/test/java/org/opensearch/ad/e2e/DetectionResultEvalutationIT.java b/src/test/java/org/opensearch/ad/e2e/DetectionResultEvalutationIT.java index ce37e155c..8edab0d15 100644 --- a/src/test/java/org/opensearch/ad/e2e/DetectionResultEvalutationIT.java +++ b/src/test/java/org/opensearch/ad/e2e/DetectionResultEvalutationIT.java @@ -11,7 +11,7 @@ package org.opensearch.ad.e2e; -import static org.opensearch.ad.TestHelpers.toHttpEntity; +import static org.opensearch.timeseries.TestHelpers.toHttpEntity; import java.text.SimpleDateFormat; import java.time.Clock; @@ -27,12 +27,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Logger; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import 
org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.client.RestClient; import org.opensearch.common.xcontent.support.XContentMapValues; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableMap; import com.google.gson.JsonElement; diff --git a/src/test/java/org/opensearch/ad/e2e/SingleStreamModelPerfIT.java b/src/test/java/org/opensearch/ad/e2e/SingleStreamModelPerfIT.java index 710c8b6ec..04f959442 100644 --- a/src/test/java/org/opensearch/ad/e2e/SingleStreamModelPerfIT.java +++ b/src/test/java/org/opensearch/ad/e2e/SingleStreamModelPerfIT.java @@ -11,7 +11,7 @@ package org.opensearch.ad.e2e; -import static org.opensearch.ad.TestHelpers.toHttpEntity; +import static org.opensearch.timeseries.TestHelpers.toHttpEntity; import java.io.File; import java.io.FileReader; @@ -32,8 +32,8 @@ import org.apache.hc.core5.http.message.BasicHeader; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Logger; -import org.opensearch.ad.TestHelpers; import org.opensearch.client.RestClient; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.gson.JsonArray; diff --git a/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java b/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java index 5f3c29433..e0ab972a2 100644 --- a/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java +++ b/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java @@ -118,11 +118,11 @@ public void setup() { featureBufferTtl = Duration.ofMillis(1_000L); detectorId = "id"; - when(detector.getDetectorId()).thenReturn(detectorId); + when(detector.getId()).thenReturn(detectorId); when(detector.getShingleSize()).thenReturn(shingleSize); IntervalTimeConfiguration detectorIntervalTimeConfig = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); intervalInMilliseconds = detectorIntervalTimeConfig.toDuration().toMillis(); - 
when(detector.getDetectorIntervalInMilliseconds()).thenReturn(intervalInMilliseconds); + when(detector.getIntervalInMilliseconds()).thenReturn(intervalInMilliseconds); Imputer imputer = new LinearUniformImputer(false); @@ -195,7 +195,7 @@ public void getColdStartData_returnExpectedToListener( double[][] expected ) throws Exception { long detectionInterval = (new IntervalTimeConfiguration(15, ChronoUnit.MINUTES)).toDuration().toMillis(); - when(detector.getDetectorIntervalInMilliseconds()).thenReturn(detectionInterval); + when(detector.getIntervalInMilliseconds()).thenReturn(detectionInterval); when(detector.getShingleSize()).thenReturn(4); doAnswer(invocation -> { ActionListener> listener = invocation.getArgument(1); @@ -335,7 +335,7 @@ public void clear_deleteFeatures() throws IOException { assertTrue(beforeMaintenance.getUnprocessedFeatures().isPresent()); assertTrue(beforeMaintenance.getProcessedFeatures().isPresent()); - featureManager.clear(detector.getDetectorId()); + featureManager.clear(detector.getId()); SinglePointFeatures afterMaintenance = getCurrentFeatures(detector, start, end); assertTrue(afterMaintenance.getUnprocessedFeatures().isPresent()); @@ -427,7 +427,7 @@ private void getPreviewFeaturesTemplate(List> samplesResults, long start = 0L; long end = 240_000L; long detectionInterval = (new IntervalTimeConfiguration(1, ChronoUnit.MINUTES)).toDuration().toMillis(); - when(detector.getDetectorIntervalInMilliseconds()).thenReturn(detectionInterval); + when(detector.getIntervalInMilliseconds()).thenReturn(detectionInterval); List> sampleRanges = Arrays.asList(new SimpleEntry<>(0L, 60_000L), new SimpleEntry<>(120_000L, 180_000L)); doAnswer(invocation -> { @@ -989,7 +989,7 @@ public void getCurrentFeatures_setsShingleSizeFromDetectorConfig(int shingleSize SinglePointFeatures listenerResponse = getCurrentFeatures(detector, 0, intervalInMilliseconds); assertTrue(listenerResponse.getProcessedFeatures().isPresent()); 
assertEquals(listenerResponse.getProcessedFeatures().get().length, shingleSize); - assertEquals(featureManager.getShingleSize(detector.getDetectorId()), shingleSize); + assertEquals(featureManager.getShingleSize(detector.getId()), shingleSize); } @Test diff --git a/src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java b/src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java index 168308583..2e628ee1a 100644 --- a/src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java +++ b/src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java @@ -55,21 +55,19 @@ import org.opensearch.action.search.SearchResponse.Clusters; import org.opensearch.action.search.SearchResponseSections; import org.opensearch.action.search.ShardSearchFailure; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.Entity; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.lease.Releasables; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; +import org.opensearch.core.common.lease.Releasables; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.index.query.QueryBuilders; import org.opensearch.indices.breaker.NoneCircuitBreakerService; @@ -96,6 +94,8 @@ import org.opensearch.search.aggregations.metrics.InternalMax; import org.opensearch.search.aggregations.metrics.SumAggregationBuilder; import org.opensearch.search.internal.InternalSearchResponse; +import 
org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.dataprocessor.Imputer; import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; import org.opensearch.timeseries.model.Feature; @@ -109,7 +109,7 @@ * Create a new class for new tests related to SearchFeatureDao. * */ -public class NoPowermockSearchFeatureDaoTests extends AbstractADTest { +public class NoPowermockSearchFeatureDaoTests extends AbstractTimeSeriesTest { private final Logger LOG = LogManager.getLogger(NoPowermockSearchFeatureDaoTests.class); private AnomalyDetector detector; @@ -141,14 +141,14 @@ public void setUp() throws Exception { hostField = "host"; detector = mock(AnomalyDetector.class); - when(detector.isMultientityDetector()).thenReturn(true); - when(detector.getCategoryField()).thenReturn(Arrays.asList(new String[] { serviceField, hostField })); + when(detector.isHighCardinality()).thenReturn(true); + when(detector.getCategoryFields()).thenReturn(Arrays.asList(new String[] { serviceField, hostField })); detectorId = "123"; - when(detector.getDetectorId()).thenReturn(detectorId); + when(detector.getId()).thenReturn(detectorId); when(detector.getTimeField()).thenReturn("testTimeField"); when(detector.getIndices()).thenReturn(Arrays.asList("testIndices")); IntervalTimeConfiguration detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); - when(detector.getDetectionInterval()).thenReturn(detectionInterval); + when(detector.getInterval()).thenReturn(detectionInterval); when(detector.getFilterQuery()).thenReturn(QueryBuilders.matchAllQuery()); client = mock(Client.class); @@ -296,7 +296,7 @@ public void testGetHighestCountEntitiesUsingTermsAgg() { }).when(client).search(any(SearchRequest.class), any(ActionListener.class)); String categoryField = "fieldName"; - when(detector.getCategoryField()).thenReturn(Collections.singletonList(categoryField)); + 
when(detector.getCategoryFields()).thenReturn(Collections.singletonList(categoryField)); ActionListener> listener = mock(ActionListener.class); searchFeatureDao.getHighestCountEntities(detector, 10L, 20L, listener); diff --git a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java b/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java index 205c093fe..c1d6cdb72 100644 --- a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java +++ b/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java @@ -184,12 +184,12 @@ public void setup() throws Exception { detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); detectorId = "123"; - when(detector.getDetectorId()).thenReturn(detectorId); + when(detector.getId()).thenReturn(detectorId); when(detector.getTimeField()).thenReturn("testTimeField"); when(detector.getIndices()).thenReturn(Arrays.asList("testIndices")); - when(detector.getDetectionInterval()).thenReturn(detectionInterval); + when(detector.getInterval()).thenReturn(detectionInterval); when(detector.getFilterQuery()).thenReturn(QueryBuilders.matchAllQuery()); - when(detector.getCategoryField()).thenReturn(Collections.singletonList("a")); + when(detector.getCategoryFields()).thenReturn(Collections.singletonList("a")); searchSourceBuilder = SearchSourceBuilder .fromXContent(XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, "{}")); diff --git a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java b/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java index 37e660011..b945b4c21 100644 --- a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java +++ b/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java @@ -187,12 +187,12 @@ public void setup() throws Exception { detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); detectorId = "123"; - 
when(detector.getDetectorId()).thenReturn(detectorId); + when(detector.getId()).thenReturn(detectorId); when(detector.getTimeField()).thenReturn("testTimeField"); when(detector.getIndices()).thenReturn(Arrays.asList("testIndices")); - when(detector.getDetectionInterval()).thenReturn(detectionInterval); + when(detector.getInterval()).thenReturn(detectionInterval); when(detector.getFilterQuery()).thenReturn(QueryBuilders.matchAllQuery()); - when(detector.getCategoryField()).thenReturn(Collections.singletonList("a")); + when(detector.getCategoryFields()).thenReturn(Collections.singletonList("a")); searchSourceBuilder = SearchSourceBuilder .fromXContent(XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, "{}")); diff --git a/src/test/java/org/opensearch/ad/indices/AnomalyDetectionIndicesTests.java b/src/test/java/org/opensearch/ad/indices/AnomalyDetectionIndicesTests.java index 42176cee4..7bdd7039a 100644 --- a/src/test/java/org/opensearch/ad/indices/AnomalyDetectionIndicesTests.java +++ b/src/test/java/org/opensearch/ad/indices/AnomalyDetectionIndicesTests.java @@ -19,7 +19,6 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexResponse; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.settings.AnomalyDetectorSettings; @@ -32,6 +31,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.rest.RestStatus; import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; public class AnomalyDetectionIndicesTests extends OpenSearchIntegTestCase { diff --git a/src/test/java/org/opensearch/ad/indices/CustomIndexTests.java b/src/test/java/org/opensearch/ad/indices/CustomIndexTests.java index a8d0c97f7..7959330f4 100644 --- 
a/src/test/java/org/opensearch/ad/indices/CustomIndexTests.java +++ b/src/test/java/org/opensearch/ad/indices/CustomIndexTests.java @@ -22,7 +22,6 @@ import java.util.Map; import org.opensearch.Version; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.util.DiscoveryNodeFilterer; @@ -35,9 +34,10 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.constant.CommonName; -public class CustomIndexTests extends AbstractADTest { +public class CustomIndexTests extends AbstractTimeSeriesTest { AnomalyDetectionIndices adIndices; Client client; ClusterService clusterService; diff --git a/src/test/java/org/opensearch/ad/indices/InitAnomalyDetectionIndicesTests.java b/src/test/java/org/opensearch/ad/indices/InitAnomalyDetectionIndicesTests.java index 4b4bf4b0f..fd92b6573 100644 --- a/src/test/java/org/opensearch/ad/indices/InitAnomalyDetectionIndicesTests.java +++ b/src/test/java/org/opensearch/ad/indices/InitAnomalyDetectionIndicesTests.java @@ -29,7 +29,6 @@ import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.create.CreateIndexRequest; import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.util.DiscoveryNodeFilterer; @@ -44,9 +43,10 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.constant.CommonName; -public class 
InitAnomalyDetectionIndicesTests extends AbstractADTest { +public class InitAnomalyDetectionIndicesTests extends AbstractTimeSeriesTest { Client client; ClusterService clusterService; ThreadPool threadPool; diff --git a/src/test/java/org/opensearch/ad/indices/RolloverTests.java b/src/test/java/org/opensearch/ad/indices/RolloverTests.java index 8806bb96c..77d8eeaf4 100644 --- a/src/test/java/org/opensearch/ad/indices/RolloverTests.java +++ b/src/test/java/org/opensearch/ad/indices/RolloverTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.admin.indices.rollover.RolloverRequest; import org.opensearch.action.admin.indices.rollover.RolloverResponse; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.util.DiscoveryNodeFilterer; @@ -49,8 +48,9 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class RolloverTests extends AbstractADTest { +public class RolloverTests extends AbstractTimeSeriesTest { private AnomalyDetectionIndices adIndices; private IndicesAdminClient indicesClient; private ClusterAdminClient clusterAdminClient; diff --git a/src/test/java/org/opensearch/ad/indices/UpdateMappingTests.java b/src/test/java/org/opensearch/ad/indices/UpdateMappingTests.java index 380fb10c5..2c19432bf 100644 --- a/src/test/java/org/opensearch/ad/indices/UpdateMappingTests.java +++ b/src/test/java/org/opensearch/ad/indices/UpdateMappingTests.java @@ -40,7 +40,6 @@ import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse; import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.AbstractADTest; 
import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.util.DiscoveryNodeFilterer; import org.opensearch.client.AdminClient; @@ -56,9 +55,10 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.constant.CommonName; -public class UpdateMappingTests extends AbstractADTest { +public class UpdateMappingTests extends AbstractTimeSeriesTest { private static String resultIndexName; private AnomalyDetectionIndices adIndices; diff --git a/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java b/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java index 5ff411c73..d3c384b3e 100644 --- a/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java +++ b/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java @@ -33,11 +33,9 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.AnomalyDetectorPlugin; import org.opensearch.ad.MemoryTracker; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.model.AnomalyDetector; @@ -55,6 +53,8 @@ import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.dataprocessor.Imputer; import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; @@ -62,7 +62,7 @@ import com.google.common.collect.ImmutableList; -public class 
AbstractCosineDataTest extends AbstractADTest { +public class AbstractCosineDataTest extends AbstractTimeSeriesTest { int numMinSamples; String modelId; String entityName; diff --git a/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java b/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java index 804896ae4..7d74b55d7 100644 --- a/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java +++ b/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java @@ -498,7 +498,7 @@ public void test_restore() throws IOException { GetResponse getResponse = mock(GetResponse.class); when(getResponse.isExists()).thenReturn(true); Map source = new HashMap<>(); - source.put(CheckpointDao.DETECTOR_ID, state.getDetectorId()); + source.put(CheckpointDao.DETECTOR_ID, state.getId()); source.put(CheckpointDao.FIELD_MODELV2, checkpointDao.toCheckpoint(modelToSave, modelId).get()); source.put(CommonName.TIMESTAMP, "2020-10-11T22:58:23.610392Z"); when(getResponse.getSource()).thenReturn(source); diff --git a/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java b/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java index 0afdcc5b0..8e29e93ac 100644 --- a/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java +++ b/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java @@ -27,7 +27,6 @@ import org.mockito.Mock; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.util.ClientUtil; @@ -36,6 +35,7 @@ import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.reindex.ScrollableHitSource; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper; import 
com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestState; @@ -52,7 +52,7 @@ * class for tests requiring checking logs. * */ -public class CheckpointDeleteTests extends AbstractADTest { +public class CheckpointDeleteTests extends AbstractTimeSeriesTest { private enum DeleteExecutionMode { NORMAL, INDEX_NOT_FOUND, diff --git a/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java b/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java index dd9cb3b72..34265b0e6 100644 --- a/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java +++ b/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java @@ -44,7 +44,6 @@ import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; import org.opensearch.ad.MemoryTracker; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.ml.ModelManager.ModelType; import org.opensearch.ad.settings.ADEnabledSetting; @@ -55,6 +54,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; @@ -477,7 +477,7 @@ public void testEmptyDataRange() throws InterruptedException { GetRequest request = invocation.getArgument(0); ActionListener listener = invocation.getArgument(2); - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); return null; }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); @@ -589,7 +589,7 @@ private void accuracyTemplate(int 
detectorIntervalMins, float precisionThreshold GetRequest request = invocation.getArgument(0); ActionListener listener = invocation.getArgument(2); - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); return null; }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); @@ -622,7 +622,7 @@ public int compare(Entry p1, Entry p2) { }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); EntityModel model = new EntityModel(entity, new ArrayDeque<>(), null); - modelState = new ModelState<>(model, modelId, detector.getDetectorId(), ModelType.ENTITY.getName(), clock, priority); + modelState = new ModelState<>(model, modelId, detector.getId(), ModelType.ENTITY.getName(), clock, priority); released = new AtomicBoolean(); @@ -632,7 +632,7 @@ public int compare(Entry p1, Entry p2) { inProgressLatch.countDown(); }); - entityColdStarter.trainModel(entity, detector.getDetectorId(), modelState, listener); + entityColdStarter.trainModel(entity, detector.getId(), modelState, listener); checkSemaphoreRelease(); assertTrue(model.getTrcf().isPresent()); diff --git a/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java b/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java index 2e096ac55..79a8afdac 100644 --- a/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java +++ b/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java @@ -36,7 +36,6 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.ad.AnomalyDetectorPlugin; import org.opensearch.ad.MemoryTracker; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.ml.ModelManager.ModelType; @@ -45,6 +44,7 @@ import 
org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.test.ClusterServiceUtils; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.settings.TimeSeriesSettings; @@ -100,7 +100,7 @@ private void averageAccuracyTemplate( doAnswer(invocation -> { ActionListener listener = invocation.getArgument(2); - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); return null; }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); @@ -220,7 +220,7 @@ public int compare(Entry p1, Entry p2) { ModelState modelState = new ModelState<>( model, entity.getModelId(detectorId).get(), - detector.getDetectorId(), + detector.getId(), ModelType.ENTITY.getName(), clock, priority @@ -234,7 +234,7 @@ public int compare(Entry p1, Entry p2) { inProgressLatch.countDown(); }); - entityColdStarter.trainModel(entity, detector.getDetectorId(), modelState, listener); + entityColdStarter.trainModel(entity, detector.getId(), modelState, listener); checkSemaphoreRelease(); assertTrue(model.getTrcf().isPresent()); diff --git a/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java b/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java index f8eb9f6a8..78dd8a3f9 100644 --- a/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java +++ b/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java @@ -698,14 +698,14 @@ public void trainModel_throwLimitExceededToListener_whenLimitExceed() { @Test public void getRcfModelId_returnNonEmptyString() { - String rcfModelId = SingleStreamModelIdMapper.getRcfModelId(anomalyDetector.getDetectorId(), 0); + String rcfModelId = 
SingleStreamModelIdMapper.getRcfModelId(anomalyDetector.getId(), 0); assertFalse(rcfModelId.isEmpty()); } @Test public void getThresholdModelId_returnNonEmptyString() { - String thresholdModelId = SingleStreamModelIdMapper.getThresholdModelId(anomalyDetector.getDetectorId()); + String thresholdModelId = SingleStreamModelIdMapper.getThresholdModelId(anomalyDetector.getId()); assertFalse(thresholdModelId.isEmpty()); } diff --git a/src/test/java/org/opensearch/ad/mock/plugin/MockReindexPlugin.java b/src/test/java/org/opensearch/ad/mock/plugin/MockReindexPlugin.java index bdb0e55ba..29db051e0 100644 --- a/src/test/java/org/opensearch/ad/mock/plugin/MockReindexPlugin.java +++ b/src/test/java/org/opensearch/ad/mock/plugin/MockReindexPlugin.java @@ -27,7 +27,6 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.mock.transport.MockAnomalyDetectorJobAction; import org.opensearch.ad.mock.transport.MockAnomalyDetectorJobTransportActionWithUser; @@ -45,6 +44,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.search.SearchHit; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/mock/transport/MockADCancelTaskNodeRequest_1_0.java b/src/test/java/org/opensearch/ad/mock/transport/MockADCancelTaskNodeRequest_1_0.java index b924f4c59..fff9aa524 100644 --- a/src/test/java/org/opensearch/ad/mock/transport/MockADCancelTaskNodeRequest_1_0.java +++ b/src/test/java/org/opensearch/ad/mock/transport/MockADCancelTaskNodeRequest_1_0.java @@ -39,7 +39,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(userName); } - public String getDetectorId() { + 
public String getId() { return detectorId; } diff --git a/src/test/java/org/opensearch/ad/mock/transport/MockForwardADTaskRequest_1_0.java b/src/test/java/org/opensearch/ad/mock/transport/MockForwardADTaskRequest_1_0.java index 8d8136d32..610fbb1fd 100644 --- a/src/test/java/org/opensearch/ad/mock/transport/MockForwardADTaskRequest_1_0.java +++ b/src/test/java/org/opensearch/ad/mock/transport/MockForwardADTaskRequest_1_0.java @@ -61,7 +61,7 @@ public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (detector == null) { validationException = addValidationError(ADCommonMessages.DETECTOR_MISSING, validationException); - } else if (detector.getDetectorId() == null) { + } else if (detector.getId() == null) { validationException = addValidationError(ADCommonMessages.AD_ID_MISSING_MSG, validationException); } if (adTaskAction == null) { diff --git a/src/test/java/org/opensearch/ad/model/ADEntityTaskProfileTests.java b/src/test/java/org/opensearch/ad/model/ADEntityTaskProfileTests.java index 6addb18d2..cd5d7fc18 100644 --- a/src/test/java/org/opensearch/ad/model/ADEntityTaskProfileTests.java +++ b/src/test/java/org/opensearch/ad/model/ADEntityTaskProfileTests.java @@ -10,7 +10,6 @@ import java.util.TreeMap; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; @@ -18,6 +17,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; public class ADEntityTaskProfileTests extends OpenSearchSingleNodeTestCase { diff --git a/src/test/java/org/opensearch/ad/model/ADTaskTests.java b/src/test/java/org/opensearch/ad/model/ADTaskTests.java index f6a807845..546a09f52 
100644 --- a/src/test/java/org/opensearch/ad/model/ADTaskTests.java +++ b/src/test/java/org/opensearch/ad/model/ADTaskTests.java @@ -17,7 +17,6 @@ import java.util.Collection; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; @@ -25,6 +24,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; public class ADTaskTests extends OpenSearchSingleNodeTestCase { diff --git a/src/test/java/org/opensearch/ad/model/AnomalyDetectorExecutionInputTests.java b/src/test/java/org/opensearch/ad/model/AnomalyDetectorExecutionInputTests.java index ccd1b32e2..d383aed3d 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyDetectorExecutionInputTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyDetectorExecutionInputTests.java @@ -16,9 +16,9 @@ import java.time.temporal.ChronoUnit; import java.util.Locale; -import org.opensearch.ad.TestHelpers; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; public class AnomalyDetectorExecutionInputTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java b/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java index f68771b9a..bb165e665 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java @@ -16,7 +16,6 @@ import java.util.Locale; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import 
org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; @@ -24,6 +23,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; public class AnomalyDetectorJobTests extends OpenSearchSingleNodeTestCase { diff --git a/src/test/java/org/opensearch/ad/model/AnomalyDetectorSerializationTests.java b/src/test/java/org/opensearch/ad/model/AnomalyDetectorSerializationTests.java index c677f7b0c..2d9f5baf9 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyDetectorSerializationTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyDetectorSerializationTests.java @@ -16,13 +16,13 @@ import java.util.Collection; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/model/AnomalyDetectorTests.java b/src/test/java/org/opensearch/ad/model/AnomalyDetectorTests.java index 7b065c86b..d3298eae2 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyDetectorTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyDetectorTests.java @@ -22,13 +22,13 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import 
org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.settings.TimeSeriesSettings; @@ -36,7 +36,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -public class AnomalyDetectorTests extends AbstractADTest { +public class AnomalyDetectorTests extends AbstractTimeSeriesTest { public void testParseAnomalyDetector() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), Instant.now()); @@ -64,7 +64,7 @@ public void testParseAnomalyDetectorWithCustomIndex() throws IOException { detectorString = detectorString .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); AnomalyDetector parsedDetector = AnomalyDetector.parse(TestHelpers.parser(detectorString)); - assertEquals("Parsing result index doesn't work", resultIndex, parsedDetector.getResultIndex()); + assertEquals("Parsing result index doesn't work", resultIndex, parsedDetector.getCustomResultIndex()); assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); } @@ -104,13 +104,7 @@ public void testParseAnomalyDetectorWithCustomDetectionDelay() throws IOExceptio detectorString = detectorString .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); AnomalyDetector parsedDetector = AnomalyDetector - .parse( - TestHelpers.parser(detectorString), - detector.getDetectorId(), - detector.getVersion(), - detectionInterval, - detectionWindowDelay - ); + .parse(TestHelpers.parser(detectorString), detector.getId(), 
detector.getVersion(), detectionInterval, detectionWindowDelay); assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); } @@ -321,7 +315,8 @@ public void testInvalidShingleSize() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -347,7 +342,8 @@ public void testNullDetectorName() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -373,7 +369,8 @@ public void testBlankDetectorName() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -399,7 +396,8 @@ public void testNullTimeField() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -425,7 +423,8 @@ public void testNullIndices() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -451,7 +450,8 @@ public void testEmptyIndices() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -477,7 +477,8 @@ public void testNullDetectionInterval() throws Exception { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ) ); } @@ -502,7 +503,8 @@ public void testInvalidDetectionInterval() { Instant.now(), null, null, - null + null, + TestHelpers.randomImputationOption() ) ); assertEquals("Detection interval must be a positive integer", exception.getMessage()); @@ -528,7 +530,8 @@ public void testInvalidWindowDelay() { Instant.now(), null, null, - null + null, + TestHelpers.randomImputationOption() ) ); assertEquals("Interval -1 should be non-negative", exception.getMessage()); @@ -567,7 +570,8 @@ public void testGetShingleSize() throws IOException { Instant.now(), null, TestHelpers.randomUser(), - 
null + null, + TestHelpers.randomImputationOption() ); assertEquals((int) anomalyDetector.getShingleSize(), 5); } @@ -590,7 +594,8 @@ public void testGetShingleSizeReturnsDefaultValue() throws IOException { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ); assertEquals((int) anomalyDetector.getShingleSize(), TimeSeriesSettings.DEFAULT_SHINGLE_SIZE); } @@ -613,27 +618,49 @@ public void testNullFeatureAttributes() throws IOException { Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ); assertNotNull(anomalyDetector.getFeatureAttributes()); assertEquals(0, anomalyDetector.getFeatureAttributes().size()); } - public void testValidateResultIndex() { - String errorMessage = AnomalyDetector.validateResultIndex("abc"); + public void testValidateResultIndex() throws IOException { + AnomalyDetector anomalyDetector = new AnomalyDetector( + randomAlphaOfLength(5), + randomLong(), + randomAlphaOfLength(5), + randomAlphaOfLength(5), + randomAlphaOfLength(5), + ImmutableList.of(randomAlphaOfLength(5)), + ImmutableList.of(TestHelpers.randomFeature()), + TestHelpers.randomQuery(), + TestHelpers.randomIntervalTimeConfiguration(), + TestHelpers.randomIntervalTimeConfiguration(), + null, + null, + 1, + Instant.now(), + null, + TestHelpers.randomUser(), + null, + TestHelpers.randomImputationOption() + ); + + String errorMessage = anomalyDetector.validateCustomResultIndex("abc"); assertEquals(INVALID_RESULT_INDEX_PREFIX, errorMessage); StringBuilder resultIndexNameBuilder = new StringBuilder(CUSTOM_RESULT_INDEX_PREFIX); for (int i = 0; i < MAX_RESULT_INDEX_NAME_SIZE - CUSTOM_RESULT_INDEX_PREFIX.length(); i++) { resultIndexNameBuilder.append("a"); } - assertNull(AnomalyDetector.validateResultIndex(resultIndexNameBuilder.toString())); + assertNull(anomalyDetector.validateCustomResultIndex(resultIndexNameBuilder.toString())); resultIndexNameBuilder.append("a"); - errorMessage = 
AnomalyDetector.validateResultIndex(resultIndexNameBuilder.toString()); + errorMessage = anomalyDetector.validateCustomResultIndex(resultIndexNameBuilder.toString()); assertEquals(AnomalyDetector.INVALID_RESULT_INDEX_NAME_SIZE, errorMessage); - errorMessage = AnomalyDetector.validateResultIndex(CUSTOM_RESULT_INDEX_PREFIX + "abc#"); + errorMessage = anomalyDetector.validateCustomResultIndex(CUSTOM_RESULT_INDEX_PREFIX + "abc#"); assertEquals(INVALID_CHAR_IN_RESULT_INDEX_NAME, errorMessage); } diff --git a/src/test/java/org/opensearch/ad/model/AnomalyResultBucketTests.java b/src/test/java/org/opensearch/ad/model/AnomalyResultBucketTests.java index 6e705a8a9..ba4f2084b 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyResultBucketTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyResultBucketTests.java @@ -11,16 +11,16 @@ import java.util.HashMap; import java.util.Map; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; -public class AnomalyResultBucketTests extends AbstractADTest { +public class AnomalyResultBucketTests extends AbstractTimeSeriesTest { public void testSerializeAnomalyResultBucket() throws IOException { AnomalyResultBucket anomalyResultBucket = TestHelpers.randomAnomalyResultBucket(); diff --git a/src/test/java/org/opensearch/ad/model/AnomalyResultTests.java b/src/test/java/org/opensearch/ad/model/AnomalyResultTests.java index ca5ea2bb7..59cc71861 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyResultTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyResultTests.java @@ 
-18,7 +18,6 @@ import java.util.Locale; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; @@ -26,6 +25,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.base.Objects; @@ -70,7 +70,7 @@ public void testParseAnomalyDetectorWithoutNormalResult() throws IOException { .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); AnomalyResult parsedDetectResult = AnomalyResult.parse(TestHelpers.parser(detectResultString)); assertTrue( - Objects.equal(detectResult.getDetectorId(), parsedDetectResult.getDetectorId()) + Objects.equal(detectResult.getId(), parsedDetectResult.getId()) && Objects.equal(detectResult.getTaskId(), parsedDetectResult.getTaskId()) && Objects.equal(detectResult.getAnomalyScore(), parsedDetectResult.getAnomalyScore()) && Objects.equal(detectResult.getAnomalyGrade(), parsedDetectResult.getAnomalyGrade()) @@ -95,7 +95,7 @@ public void testParseAnomalyDetectorWithNanAnomalyResult() throws IOException { assertNull(parsedDetectResult.getAnomalyGrade()); assertNull(parsedDetectResult.getAnomalyScore()); assertTrue( - Objects.equal(detectResult.getDetectorId(), parsedDetectResult.getDetectorId()) + Objects.equal(detectResult.getId(), parsedDetectResult.getId()) && Objects.equal(detectResult.getTaskId(), parsedDetectResult.getTaskId()) && Objects.equal(detectResult.getFeatureData(), parsedDetectResult.getFeatureData()) && Objects.equal(detectResult.getDataStartTime(), parsedDetectResult.getDataStartTime()) diff --git a/src/test/java/org/opensearch/ad/model/DetectionDateRangeTests.java 
b/src/test/java/org/opensearch/ad/model/DetectionDateRangeTests.java index e12252ab0..dc563d38c 100644 --- a/src/test/java/org/opensearch/ad/model/DetectionDateRangeTests.java +++ b/src/test/java/org/opensearch/ad/model/DetectionDateRangeTests.java @@ -18,7 +18,6 @@ import java.util.Locale; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; @@ -26,6 +25,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; public class DetectionDateRangeTests extends OpenSearchSingleNodeTestCase { diff --git a/src/test/java/org/opensearch/ad/model/DetectorInternalStateTests.java b/src/test/java/org/opensearch/ad/model/DetectorInternalStateTests.java index e19cd2b27..2ea993b72 100644 --- a/src/test/java/org/opensearch/ad/model/DetectorInternalStateTests.java +++ b/src/test/java/org/opensearch/ad/model/DetectorInternalStateTests.java @@ -8,9 +8,9 @@ import java.io.IOException; import java.time.Instant; -import org.opensearch.ad.TestHelpers; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; public class DetectorInternalStateTests extends OpenSearchSingleNodeTestCase { diff --git a/src/test/java/org/opensearch/ad/model/DetectorProfileTests.java b/src/test/java/org/opensearch/ad/model/DetectorProfileTests.java index dfc4c1338..b276d212e 100644 --- a/src/test/java/org/opensearch/ad/model/DetectorProfileTests.java +++ b/src/test/java/org/opensearch/ad/model/DetectorProfileTests.java @@ -14,13 +14,13 @@ import java.io.IOException; import java.util.Map; -import 
org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; public class DetectorProfileTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/model/EntityAnomalyResultTests.java b/src/test/java/org/opensearch/ad/model/EntityAnomalyResultTests.java index 2713e2c98..24cb0c879 100644 --- a/src/test/java/org/opensearch/ad/model/EntityAnomalyResultTests.java +++ b/src/test/java/org/opensearch/ad/model/EntityAnomalyResultTests.java @@ -12,7 +12,7 @@ package org.opensearch.ad.model; import static java.util.Arrays.asList; -import static org.opensearch.ad.TestHelpers.randomHCADAnomalyDetectResult; +import static org.opensearch.timeseries.TestHelpers.randomHCADAnomalyDetectResult; import java.util.ArrayList; import java.util.List; diff --git a/src/test/java/org/opensearch/ad/model/EntityProfileTests.java b/src/test/java/org/opensearch/ad/model/EntityProfileTests.java index d85c515ac..caa45a2d8 100644 --- a/src/test/java/org/opensearch/ad/model/EntityProfileTests.java +++ b/src/test/java/org/opensearch/ad/model/EntityProfileTests.java @@ -15,16 +15,16 @@ import java.io.IOException; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.common.exception.JsonPathNotFoundException; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.common.Strings; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import test.org.opensearch.ad.util.JsonDeserializer; -public class EntityProfileTests extends AbstractADTest { +public class EntityProfileTests extends AbstractTimeSeriesTest { public 
void testMerge() { EntityProfile profile1 = new EntityProfile(null, -1, -1, null, null, EntityState.INIT); EntityProfile profile2 = new EntityProfile(null, -1, -1, null, null, EntityState.UNKNOWN); diff --git a/src/test/java/org/opensearch/ad/model/EntityTests.java b/src/test/java/org/opensearch/ad/model/EntityTests.java index f3affd6c1..fe02afb0e 100644 --- a/src/test/java/org/opensearch/ad/model/EntityTests.java +++ b/src/test/java/org/opensearch/ad/model/EntityTests.java @@ -15,9 +15,9 @@ import java.util.Optional; import java.util.TreeMap; -import org.opensearch.ad.AbstractADTest; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class EntityTests extends AbstractADTest { +public class EntityTests extends AbstractTimeSeriesTest { /** * Test that toStrign has no random string, but only attributes */ diff --git a/src/test/java/org/opensearch/ad/model/FeatureDataTests.java b/src/test/java/org/opensearch/ad/model/FeatureDataTests.java index 2b53fdbb8..bf1790b99 100644 --- a/src/test/java/org/opensearch/ad/model/FeatureDataTests.java +++ b/src/test/java/org/opensearch/ad/model/FeatureDataTests.java @@ -14,9 +14,9 @@ import java.io.IOException; import java.util.Locale; -import org.opensearch.ad.TestHelpers; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; public class FeatureDataTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/model/FeatureTests.java b/src/test/java/org/opensearch/ad/model/FeatureTests.java index 56e643763..bc3baafe8 100644 --- a/src/test/java/org/opensearch/ad/model/FeatureTests.java +++ b/src/test/java/org/opensearch/ad/model/FeatureTests.java @@ -14,9 +14,9 @@ import java.io.IOException; import java.util.Locale; -import org.opensearch.ad.TestHelpers; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; import 
org.opensearch.timeseries.model.Feature; public class FeatureTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/model/IntervalTimeConfigurationTests.java b/src/test/java/org/opensearch/ad/model/IntervalTimeConfigurationTests.java index b01c9f0da..970d9fd89 100644 --- a/src/test/java/org/opensearch/ad/model/IntervalTimeConfigurationTests.java +++ b/src/test/java/org/opensearch/ad/model/IntervalTimeConfigurationTests.java @@ -16,9 +16,9 @@ import java.time.temporal.ChronoUnit; import java.util.Locale; -import org.opensearch.ad.TestHelpers; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.model.TimeConfiguration; diff --git a/src/test/java/org/opensearch/ad/model/MergeableListTests.java b/src/test/java/org/opensearch/ad/model/MergeableListTests.java index 79b3f43bc..f9d794da6 100644 --- a/src/test/java/org/opensearch/ad/model/MergeableListTests.java +++ b/src/test/java/org/opensearch/ad/model/MergeableListTests.java @@ -14,9 +14,9 @@ import java.util.ArrayList; import java.util.List; -import org.opensearch.ad.AbstractADTest; +import org.opensearch.timeseries.AbstractTimeSeriesTest; -public class MergeableListTests extends AbstractADTest { +public class MergeableListTests extends AbstractTimeSeriesTest { public void testMergeableListGetElements() { List ls1 = new ArrayList(); diff --git a/src/test/java/org/opensearch/ad/model/ModelProfileTests.java b/src/test/java/org/opensearch/ad/model/ModelProfileTests.java index 28920d3ba..716e86637 100644 --- a/src/test/java/org/opensearch/ad/model/ModelProfileTests.java +++ b/src/test/java/org/opensearch/ad/model/ModelProfileTests.java @@ -15,15 +15,15 @@ import java.io.IOException; -import org.opensearch.ad.AbstractADTest; import org.opensearch.common.Strings; import org.opensearch.core.xcontent.ToXContent; 
import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.constant.CommonName; import test.org.opensearch.ad.util.JsonDeserializer; -public class ModelProfileTests extends AbstractADTest { +public class ModelProfileTests extends AbstractTimeSeriesTest { public void testToXContent() throws IOException { ModelProfile profile1 = new ModelProfile( diff --git a/src/test/java/org/opensearch/ad/plugin/MockReindexPlugin.java b/src/test/java/org/opensearch/ad/plugin/MockReindexPlugin.java index 74adb46c8..079a93c9f 100644 --- a/src/test/java/org/opensearch/ad/plugin/MockReindexPlugin.java +++ b/src/test/java/org/opensearch/ad/plugin/MockReindexPlugin.java @@ -28,7 +28,6 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.client.Client; import org.opensearch.common.inject.Inject; @@ -44,6 +43,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.search.SearchHit; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java b/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java index e29865541..e29e13a43 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java +++ b/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java @@ -22,14 +22,14 @@ import java.util.Optional; import org.opensearch.action.ActionListener; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import 
org.opensearch.ad.model.Entity; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; -public class AbstractRateLimitingTest extends AbstractADTest { +public class AbstractRateLimitingTest extends AbstractTimeSeriesTest { Clock clock; AnomalyDetector detector; NodeStateManager nodeStateManager; diff --git a/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java b/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java index a87720d0c..41e1346a3 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java +++ b/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java @@ -47,7 +47,6 @@ import org.opensearch.action.get.MultiGetItemResponse; import org.opensearch.action.get.MultiGetResponse; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -74,6 +73,7 @@ import org.opensearch.rest.RestStatus; import org.opensearch.threadpool.ThreadPoolStats; import org.opensearch.threadpool.ThreadPoolStats.Stats; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.LimitExceededException; import org.opensearch.timeseries.stats.StatNames; @@ -711,10 +711,10 @@ public void testChangePriority() { } public void testDetectorId() { - assertEquals(detectorId, request.getDetectorId()); + assertEquals(detectorId, request.getId()); String newDetectorId = "456"; request.setDetectorId(newDetectorId); - assertEquals(newDetectorId, request.getDetectorId()); + assertEquals(newDetectorId, request.getId()); } @SuppressWarnings("unchecked") diff --git a/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java b/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java 
index 1408a80f3..a3afda641 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java +++ b/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java @@ -266,7 +266,7 @@ public void testOverloaded() { worker.write(state, true, RequestPriority.MEDIUM); verify(checkpoint, times(1)).batchWrite(any(), any()); - verify(nodeStateManager, times(1)).setException(eq(state.getDetectorId()), any(OpenSearchRejectedExecutionException.class)); + verify(nodeStateManager, times(1)).setException(eq(state.getId()), any(OpenSearchRejectedExecutionException.class)); } public void testRetryException() { @@ -280,7 +280,7 @@ public void testRetryException() { worker.write(state, true, RequestPriority.MEDIUM); // we don't retry checkpoint write verify(checkpoint, times(1)).batchWrite(any(), any()); - verify(nodeStateManager, times(1)).setException(eq(state.getDetectorId()), any(OpenSearchStatusException.class)); + verify(nodeStateManager, times(1)).setException(eq(state.getId()), any(OpenSearchStatusException.class)); } /** @@ -353,7 +353,7 @@ public void testEmptyModelId() { when(state.getLastCheckpointTime()).thenReturn(Instant.now()); EntityModel model = mock(EntityModel.class); when(state.getModel()).thenReturn(model); - when(state.getDetectorId()).thenReturn("1"); + when(state.getId()).thenReturn("1"); when(state.getModelId()).thenReturn(null); worker.write(state, true, RequestPriority.MEDIUM); @@ -366,7 +366,7 @@ public void testEmptyDetectorId() { when(state.getLastCheckpointTime()).thenReturn(Instant.now()); EntityModel model = mock(EntityModel.class); when(state.getModel()).thenReturn(model); - when(state.getDetectorId()).thenReturn(null); + when(state.getId()).thenReturn(null); when(state.getModelId()).thenReturn("a"); worker.write(state, true, RequestPriority.MEDIUM); diff --git a/src/test/java/org/opensearch/ad/ratelimit/ResultWriteWorkerTests.java b/src/test/java/org/opensearch/ad/ratelimit/ResultWriteWorkerTests.java index 
05e0d60ab..0b548a1e1 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/ResultWriteWorkerTests.java +++ b/src/test/java/org/opensearch/ad/ratelimit/ResultWriteWorkerTests.java @@ -34,7 +34,6 @@ import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.index.IndexRequest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyResult; @@ -50,6 +49,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestStatus; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; public class ResultWriteWorkerTests extends AbstractRateLimitingTest { ResultWriteWorker resultWriteQueue; diff --git a/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java b/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java index 054ef12aa..728f417b0 100644 --- a/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java +++ b/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java @@ -36,7 +36,6 @@ import org.apache.hc.core5.http.message.BasicHeader; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.Logger; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.mock.model.MockSimpleLog; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskProfile; @@ -45,6 +44,7 @@ import org.opensearch.client.Response; import org.opensearch.client.RestClient; import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.IntervalTimeConfiguration; @@ -210,7 +210,8 @@ public static Response createAnomalyDetector( now, categoryFields, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ); if (historical) { diff --git 
a/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java index 024276193..76a174956 100644 --- a/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -33,7 +33,6 @@ import org.junit.Assert; import org.opensearch.ad.AnomalyDetectorPlugin; import org.opensearch.ad.AnomalyDetectorRestTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; @@ -50,6 +49,7 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.rest.RestStatus; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; @@ -144,7 +144,8 @@ public void testCreateAnomalyDetectorWithDuplicateName() throws Exception { null, null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ); TestHelpers @@ -208,7 +209,7 @@ public void testUpdateAnomalyDetectorCategoryField() throws Exception { detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), @@ -216,7 +217,8 @@ public void testUpdateAnomalyDetectorCategoryField() throws Exception { detector.getLastUpdateTime(), ImmutableList.of(randomAlphaOfLength(5)), detector.getUser(), - null + null, + TestHelpers.randomImputationOption() ); Exception ex = expectThrows( ResponseException.class, @@ -238,12 +240,12 @@ public void testGetAnomalyDetector() throws Exception { updateClusterSettings(ADEnabledSetting.AD_ENABLED, false); - Exception ex = 
expectThrows(ResponseException.class, () -> getAnomalyDetector(detector.getDetectorId(), client())); + Exception ex = expectThrows(ResponseException.class, () -> getAnomalyDetector(detector.getId(), client())); assertThat(ex.getMessage(), containsString(ADCommonMessages.DISABLED_ERR_MSG)); updateClusterSettings(ADEnabledSetting.AD_ENABLED, true); - AnomalyDetector createdDetector = getAnomalyDetector(detector.getDetectorId(), client()); + AnomalyDetector createdDetector = getAnomalyDetector(detector.getId(), client()); assertEquals("Incorrect Location header", detector, createdDetector); } @@ -256,7 +258,7 @@ public void testUpdateAnomalyDetector() throws Exception { AnomalyDetector detector = createAnomalyDetector(createIndexAndGetAnomalyDetector(INDEX_NAME), true, client()); String newDescription = randomAlphaOfLength(5); AnomalyDetector newDetector = new AnomalyDetector( - detector.getDetectorId(), + detector.getId(), detector.getVersion(), detector.getName(), newDescription, @@ -264,7 +266,7 @@ public void testUpdateAnomalyDetector() throws Exception { detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), @@ -272,7 +274,8 @@ public void testUpdateAnomalyDetector() throws Exception { detector.getLastUpdateTime(), null, detector.getUser(), - null + null, + TestHelpers.randomImputationOption() ); updateClusterSettings(ADEnabledSetting.AD_ENABLED, false); @@ -283,7 +286,7 @@ public void testUpdateAnomalyDetector() throws Exception { .makeRequest( client(), "PUT", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "?refresh=true", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "?refresh=true", ImmutableMap.of(), TestHelpers.toHttpEntity(newDetector), null @@ -297,7 +300,7 @@ public void testUpdateAnomalyDetector() throws Exception { .makeRequest( client(), "PUT", 
- TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "?refresh=true", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "?refresh=true", ImmutableMap.of(), TestHelpers.toHttpEntity(newDetector), null @@ -305,10 +308,10 @@ public void testUpdateAnomalyDetector() throws Exception { assertEquals("Update anomaly detector failed", RestStatus.OK, TestHelpers.restStatus(updateResponse)); Map responseBody = entityAsMap(updateResponse); - assertEquals("Updated anomaly detector id doesn't match", detector.getDetectorId(), responseBody.get("_id")); + assertEquals("Updated anomaly detector id doesn't match", detector.getId(), responseBody.get("_id")); assertEquals("Version not incremented", (detector.getVersion().intValue() + 1), (int) responseBody.get("_version")); - AnomalyDetector updatedDetector = getAnomalyDetector(detector.getDetectorId(), client()); + AnomalyDetector updatedDetector = getAnomalyDetector(detector.getId(), client()); assertNotEquals("Anomaly detector last update time not changed", updatedDetector.getLastUpdateTime(), detector.getLastUpdateTime()); assertEquals("Anomaly detector description not updated", newDescription, updatedDetector.getDescription()); } @@ -317,7 +320,7 @@ public void testUpdateAnomalyDetectorNameToExisting() throws Exception { AnomalyDetector detector1 = createIndexAndGetAnomalyDetector("index-test-one"); AnomalyDetector detector2 = createIndexAndGetAnomalyDetector("index-test-two"); AnomalyDetector newDetector1WithDetector2Name = new AnomalyDetector( - detector1.getDetectorId(), + detector1.getId(), detector1.getVersion(), detector2.getName(), detector1.getDescription(), @@ -325,7 +328,7 @@ public void testUpdateAnomalyDetectorNameToExisting() throws Exception { detector1.getIndices(), detector1.getFeatureAttributes(), detector1.getFilterQuery(), - detector1.getDetectionInterval(), + detector1.getInterval(), detector1.getWindowDelay(), detector1.getShingleSize(), detector1.getUiMetadata(), @@ -333,7 
+336,8 @@ public void testUpdateAnomalyDetectorNameToExisting() throws Exception { detector1.getLastUpdateTime(), null, detector1.getUser(), - null + null, + TestHelpers.randomImputationOption() ); TestHelpers @@ -355,7 +359,7 @@ public void testUpdateAnomalyDetectorNameToExisting() throws Exception { public void testUpdateAnomalyDetectorNameToNew() throws Exception { AnomalyDetector detector = createAnomalyDetector(createIndexAndGetAnomalyDetector(INDEX_NAME), true, client()); AnomalyDetector detectorWithNewName = new AnomalyDetector( - detector.getDetectorId(), + detector.getId(), detector.getVersion(), randomAlphaOfLength(5), detector.getDescription(), @@ -363,7 +367,7 @@ public void testUpdateAnomalyDetectorNameToNew() throws Exception { detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), @@ -371,22 +375,23 @@ public void testUpdateAnomalyDetectorNameToNew() throws Exception { Instant.now(), null, detector.getUser(), - null + null, + TestHelpers.randomImputationOption() ); TestHelpers .makeRequest( client(), "PUT", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "?refresh=true", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "?refresh=true", ImmutableMap.of(), TestHelpers.toHttpEntity(detectorWithNewName), null ); - AnomalyDetector resultDetector = getAnomalyDetector(detectorWithNewName.getDetectorId(), client()); + AnomalyDetector resultDetector = getAnomalyDetector(detectorWithNewName.getId(), client()); assertEquals("Detector name updating failed", detectorWithNewName.getName(), resultDetector.getName()); - assertEquals("Updated anomaly detector id doesn't match", detectorWithNewName.getDetectorId(), resultDetector.getDetectorId()); + assertEquals("Updated anomaly detector id doesn't match", detectorWithNewName.getId(), resultDetector.getId()); 
assertNotEquals( "Anomaly detector last update time not changed", detectorWithNewName.getLastUpdateTime(), @@ -400,7 +405,7 @@ public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { String newDescription = randomAlphaOfLength(5); AnomalyDetector newDetector = new AnomalyDetector( - detector.getDetectorId(), + detector.getId(), detector.getVersion(), detector.getName(), newDescription, @@ -408,7 +413,7 @@ public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), @@ -416,7 +421,8 @@ public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { detector.getLastUpdateTime(), null, detector.getUser(), - null + null, + TestHelpers.randomImputationOption() ); deleteIndexWithAdminClient(CommonName.CONFIG_INDEX); @@ -429,7 +435,7 @@ public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { .makeRequest( client(), "PUT", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId(), + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId(), ImmutableMap.of(), TestHelpers.toHttpEntity(newDetector), null @@ -439,7 +445,7 @@ public void testUpdateAnomalyDetectorWithNotExistingIndex() throws Exception { public void testSearchAnomalyDetector() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); - SearchSourceBuilder search = (new SearchSourceBuilder()).query(QueryBuilders.termQuery("_id", detector.getDetectorId())); + SearchSourceBuilder search = (new SearchSourceBuilder()).query(QueryBuilders.termQuery("_id", detector.getId())); updateClusterSettings(ADEnabledSetting.AD_ENABLED, false); @@ -490,7 +496,7 @@ public void testStatsAnomalyDetector() throws Exception { public void testPreviewAnomalyDetector() throws Exception { 
AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( - detector.getDetectorId(), + detector.getId(), Instant.now().minusSeconds(60 * 10), Instant.now(), null @@ -574,7 +580,7 @@ public void testExecuteAnomalyDetectorWithNullDetectorId() throws Exception { public void testPreviewAnomalyDetectorWithDetector() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( - detector.getDetectorId(), + detector.getId(), Instant.now().minusSeconds(60 * 10), Instant.now(), detector @@ -595,7 +601,7 @@ public void testPreviewAnomalyDetectorWithDetector() throws Exception { public void testPreviewAnomalyDetectorWithDetectorAndNoFeatures() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( - detector.getDetectorId(), + detector.getId(), Instant.now().minusSeconds(60 * 10), Instant.now(), TestHelpers.randomAnomalyDetectorWithEmptyFeature() @@ -630,8 +636,7 @@ public void testSearchAnomalyResult() throws Exception { ); assertEquals("Post anomaly result failed", RestStatus.CREATED, TestHelpers.restStatus(response)); - SearchSourceBuilder search = (new SearchSourceBuilder()) - .query(QueryBuilders.termQuery("detector_id", anomalyResult.getDetectorId())); + SearchSourceBuilder search = (new SearchSourceBuilder()).query(QueryBuilders.termQuery("detector_id", anomalyResult.getId())); updateClusterSettings(ADEnabledSetting.AD_ENABLED, false); @@ -683,27 +688,13 @@ public void testDeleteAnomalyDetector() throws Exception { Exception ex = expectThrows( ResponseException.class, () -> TestHelpers - .makeRequest( - client(), - "DELETE", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId(), - ImmutableMap.of(), - "", - null - ) + .makeRequest(client(), 
"DELETE", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId(), ImmutableMap.of(), "", null) ); assertThat(ex.getMessage(), containsString(ADCommonMessages.DISABLED_ERR_MSG)); updateClusterSettings(ADEnabledSetting.AD_ENABLED, true); Response response = TestHelpers - .makeRequest( - client(), - "DELETE", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId(), - ImmutableMap.of(), - "", - null - ); + .makeRequest(client(), "DELETE", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId(), ImmutableMap.of(), "", null); assertEquals("Delete anomaly detector failed", RestStatus.OK, TestHelpers.restStatus(response)); } @@ -726,14 +717,7 @@ public void testDeleteAnomalyDetectorWhichNotExist() throws Exception { public void testDeleteAnomalyDetectorWithNoAdJob() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, false, client()); Response response = TestHelpers - .makeRequest( - client(), - "DELETE", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId(), - ImmutableMap.of(), - "", - null - ); + .makeRequest(client(), "DELETE", TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId(), ImmutableMap.of(), "", null); assertEquals("Delete anomaly detector failed", RestStatus.OK, TestHelpers.restStatus(response)); } @@ -743,7 +727,7 @@ public void testDeleteAnomalyDetectorWithRunningAdJob() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -759,7 +743,7 @@ public void testDeleteAnomalyDetectorWithRunningAdJob() throws Exception { .makeRequest( client(), "DELETE", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId(), + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId(), ImmutableMap.of(), "", null @@ -773,7 +757,7 @@ public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { 
.makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -784,7 +768,7 @@ public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { String newDescription = randomAlphaOfLength(5); AnomalyDetector newDetector = new AnomalyDetector( - detector.getDetectorId(), + detector.getId(), detector.getVersion(), detector.getName(), newDescription, @@ -792,7 +776,7 @@ public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), @@ -800,7 +784,8 @@ public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { detector.getLastUpdateTime(), null, detector.getUser(), - null + null, + TestHelpers.randomImputationOption() ); TestHelpers @@ -811,7 +796,7 @@ public void testUpdateAnomalyDetectorWithRunningAdJob() throws Exception { .makeRequest( client(), "PUT", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId(), + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId(), ImmutableMap.of(), TestHelpers.toHttpEntity(newDetector), null @@ -825,7 +810,7 @@ public void testGetDetectorWithAdJob() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -833,12 +818,12 @@ public void testGetDetectorWithAdJob() throws Exception { assertEquals("Fail to start AD job", RestStatus.OK, TestHelpers.restStatus(startAdJobResponse)); - ToXContentObject[] results = getAnomalyDetector(detector.getDetectorId(), true, client()); + ToXContentObject[] results = 
getAnomalyDetector(detector.getId(), true, client()); assertEquals("Incorrect Location header", detector, results[0]); - assertEquals("Incorrect detector job name", detector.getDetectorId(), ((AnomalyDetectorJob) results[1]).getName()); + assertEquals("Incorrect detector job name", detector.getId(), ((AnomalyDetectorJob) results[1]).getName()); assertTrue(((AnomalyDetectorJob) results[1]).isEnabled()); - results = getAnomalyDetector(detector.getDetectorId(), false, client()); + results = getAnomalyDetector(detector.getId(), false, client()); assertEquals("Incorrect Location header", detector, results[0]); assertEquals("Should not return detector job", null, results[1]); } @@ -854,7 +839,7 @@ public void testStartAdJobWithExistingDetector() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -867,7 +852,7 @@ public void testStartAdJobWithExistingDetector() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -879,7 +864,7 @@ public void testStartAdJobWithExistingDetector() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -930,7 +915,7 @@ public void testStopAdJob() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -945,7 +930,7 @@ public void testStopAdJob() throws Exception { .makeRequest( client(), "POST", - 
TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_stop", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_stop", ImmutableMap.of(), "", null @@ -959,7 +944,7 @@ public void testStopAdJob() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_stop", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_stop", ImmutableMap.of(), "", null @@ -970,7 +955,7 @@ public void testStopAdJob() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_stop", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_stop", ImmutableMap.of(), "", null @@ -988,7 +973,7 @@ public void testStopNonExistingAdJobIndex() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_stop", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_stop", ImmutableMap.of(), "", null @@ -1002,7 +987,7 @@ public void testStopNonExistingAdJob() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -1031,7 +1016,7 @@ public void testStartDisabledAdjob() throws IOException { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -1042,7 +1027,7 @@ public void testStartDisabledAdjob() throws IOException { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_stop", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_stop", ImmutableMap.of(), "", null @@ -1053,7 +1038,7 @@ public void testStartDisabledAdjob() throws 
IOException { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -1075,7 +1060,7 @@ public void testStartAdjobWithNullFeatures() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -1096,7 +1081,7 @@ public void testStartAdjobWithEmptyFeatures() throws Exception { .makeRequest( client(), "POST", - TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getDetectorId() + "/_start", + TestHelpers.AD_BASE_DETECTORS_URI + "/" + detector.getId() + "/_start", ImmutableMap.of(), "", null @@ -1109,24 +1094,24 @@ public void testDefaultProfileAnomalyDetector() throws Exception { updateClusterSettings(ADEnabledSetting.AD_ENABLED, false); - Exception ex = expectThrows(ResponseException.class, () -> getDetectorProfile(detector.getDetectorId())); + Exception ex = expectThrows(ResponseException.class, () -> getDetectorProfile(detector.getId())); assertThat(ex.getMessage(), containsString(ADCommonMessages.DISABLED_ERR_MSG)); updateClusterSettings(ADEnabledSetting.AD_ENABLED, true); - Response profileResponse = getDetectorProfile(detector.getDetectorId()); + Response profileResponse = getDetectorProfile(detector.getId()); assertEquals("Incorrect profile status", RestStatus.OK, TestHelpers.restStatus(profileResponse)); } public void testAllProfileAnomalyDetector() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); - Response profileResponse = getDetectorProfile(detector.getDetectorId(), true); + Response profileResponse = getDetectorProfile(detector.getId(), true); assertEquals("Incorrect profile status", RestStatus.OK, TestHelpers.restStatus(profileResponse)); } public void 
testCustomizedProfileAnomalyDetector() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); - Response profileResponse = getDetectorProfile(detector.getDetectorId(), true, "/models/", client()); + Response profileResponse = getDetectorProfile(detector.getId(), true, "/models/", client()); assertEquals("Incorrect profile status", RestStatus.OK, TestHelpers.restStatus(profileResponse)); } @@ -1170,24 +1155,24 @@ public void testSearchAnomalyDetectorMatch() throws Exception { public void testRunDetectorWithNoEnabledFeature() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client(), false); - Assert.assertNotNull(detector.getDetectorId()); + Assert.assertNotNull(detector.getId()); Instant now = Instant.now(); ResponseException e = expectThrows( ResponseException.class, - () -> startAnomalyDetector(detector.getDetectorId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), client()) + () -> startAnomalyDetector(detector.getId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), client()) ); assertTrue(e.getMessage().contains("Can't start detector job as no enabled features configured")); } public void testDeleteAnomalyDetectorWhileRunning() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true, client()); - Assert.assertNotNull(detector.getDetectorId()); + Assert.assertNotNull(detector.getId()); Instant now = Instant.now(); - Response response = startAnomalyDetector(detector.getDetectorId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), client()); + Response response = startAnomalyDetector(detector.getId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), client()); Assert.assertThat(response.getStatusLine().toString(), CoreMatchers.containsString("200 OK")); // Deleting detector should fail while its running - Exception exception = expectThrows(IOException.class, () -> { deleteAnomalyDetector(detector.getDetectorId(), client()); }); + Exception 
exception = expectThrows(IOException.class, () -> { deleteAnomalyDetector(detector.getId(), client()); }); Assert.assertTrue(exception.getMessage().contains("Detector is running")); } @@ -1213,14 +1198,14 @@ public void testBackwardCompatibilityWithOpenDistro() throws IOException { // Get the detector using new _plugins API AnomalyDetector createdDetector = getAnomalyDetector(id, client()); - assertEquals("Get anomaly detector failed", createdDetector.getDetectorId(), id); + assertEquals("Get anomaly detector failed", createdDetector.getId(), id); // Delete the detector using legacy _opendistro API response = TestHelpers .makeRequest( client(), "DELETE", - TestHelpers.LEGACY_OPENDISTRO_AD_BASE_DETECTORS_URI + "/" + createdDetector.getDetectorId(), + TestHelpers.LEGACY_OPENDISTRO_AD_BASE_DETECTORS_URI + "/" + createdDetector.getId(), ImmutableMap.of(), "", null @@ -1261,7 +1246,7 @@ public void testValidateAnomalyDetectorWithDuplicateName() throws Exception { Map> messageMap = (Map>) XContentMapValues .extractValue("detector", responseMap); assertEquals("Validation returned duplicate detector name message", RestStatus.OK, TestHelpers.restStatus(resp)); - String errorMsg = String.format(Locale.ROOT, DUPLICATE_DETECTOR_MSG, detector.getName(), "[" + detector.getDetectorId() + "]"); + String errorMsg = String.format(Locale.ROOT, DUPLICATE_DETECTOR_MSG, detector.getName(), "[" + detector.getId() + "]"); assertEquals("duplicate error message", errorMsg, messageMap.get("name").get("message")); } @@ -1522,59 +1507,56 @@ public void testSearchTopAnomalyResultsWithInvalidInputs() throws IOException { // Missing start time Exception missingStartTimeException = expectThrows( IOException.class, - () -> { searchTopAnomalyResults(detector.getDetectorId(), false, "{\"end_time_ms\":2}", client()); } + () -> { searchTopAnomalyResults(detector.getId(), false, "{\"end_time_ms\":2}", client()); } ); assertTrue(missingStartTimeException.getMessage().contains("Must set both start time 
and end time with epoch of milliseconds")); // Missing end time Exception missingEndTimeException = expectThrows( IOException.class, - () -> { searchTopAnomalyResults(detector.getDetectorId(), false, "{\"start_time_ms\":1}", client()); } + () -> { searchTopAnomalyResults(detector.getId(), false, "{\"start_time_ms\":1}", client()); } ); assertTrue(missingEndTimeException.getMessage().contains("Must set both start time and end time with epoch of milliseconds")); // Start time > end time Exception invalidTimeException = expectThrows( IOException.class, - () -> { searchTopAnomalyResults(detector.getDetectorId(), false, "{\"start_time_ms\":2, \"end_time_ms\":1}", client()); } + () -> { searchTopAnomalyResults(detector.getId(), false, "{\"start_time_ms\":2, \"end_time_ms\":1}", client()); } ); assertTrue(invalidTimeException.getMessage().contains("Start time should be before end time")); // Invalid detector ID Exception invalidDetectorIdException = expectThrows( IOException.class, - () -> { - searchTopAnomalyResults(detector.getDetectorId() + "-invalid", false, "{\"start_time_ms\":1, \"end_time_ms\":2}", client()); - } + () -> { searchTopAnomalyResults(detector.getId() + "-invalid", false, "{\"start_time_ms\":1, \"end_time_ms\":2}", client()); } ); assertTrue(invalidDetectorIdException.getMessage().contains("Can't find config with id")); // Invalid order field - Exception invalidOrderException = expectThrows(IOException.class, () -> { - searchTopAnomalyResults( - detector.getDetectorId(), - false, - "{\"start_time_ms\":1, \"end_time_ms\":2, \"order\":\"invalid-order\"}", - client() - ); - }); + Exception invalidOrderException = expectThrows( + IOException.class, + () -> { + searchTopAnomalyResults( + detector.getId(), + false, + "{\"start_time_ms\":1, \"end_time_ms\":2, \"order\":\"invalid-order\"}", + client() + ); + } + ); assertTrue(invalidOrderException.getMessage().contains("Ordering by invalid-order is not a valid option")); // Negative size field Exception 
negativeSizeException = expectThrows( IOException.class, - () -> { - searchTopAnomalyResults(detector.getDetectorId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2, \"size\":-1}", client()); - } + () -> { searchTopAnomalyResults(detector.getId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2, \"size\":-1}", client()); } ); assertTrue(negativeSizeException.getMessage().contains("Size must be a positive integer")); // Zero size field Exception zeroSizeException = expectThrows( IOException.class, - () -> { - searchTopAnomalyResults(detector.getDetectorId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2, \"size\":0}", client()); - } + () -> { searchTopAnomalyResults(detector.getId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2, \"size\":0}", client()); } ); assertTrue(zeroSizeException.getMessage().contains("Size must be a positive integer")); @@ -1582,12 +1564,7 @@ public void testSearchTopAnomalyResultsWithInvalidInputs() throws IOException { Exception tooLargeSizeException = expectThrows( IOException.class, () -> { - searchTopAnomalyResults( - detector.getDetectorId(), - false, - "{\"start_time_ms\":1, \"end_time_ms\":2, \"size\":9999999}", - client() - ); + searchTopAnomalyResults(detector.getId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2, \"size\":9999999}", client()); } ); assertTrue(tooLargeSizeException.getMessage().contains("Size cannot exceed")); @@ -1595,14 +1572,14 @@ public void testSearchTopAnomalyResultsWithInvalidInputs() throws IOException { // No existing task ID for detector Exception noTaskIdException = expectThrows( IOException.class, - () -> { searchTopAnomalyResults(detector.getDetectorId(), true, "{\"start_time_ms\":1, \"end_time_ms\":2}", client()); } + () -> { searchTopAnomalyResults(detector.getId(), true, "{\"start_time_ms\":1, \"end_time_ms\":2}", client()); } ); - assertTrue(noTaskIdException.getMessage().contains("No historical tasks found for detector ID " + detector.getDetectorId())); + 
assertTrue(noTaskIdException.getMessage().contains("No historical tasks found for detector ID " + detector.getId())); // Invalid category fields Exception invalidCategoryFieldsException = expectThrows(IOException.class, () -> { searchTopAnomalyResults( - detector.getDetectorId(), + detector.getId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2, \"category_field\":[\"invalid-field\"]}", client() @@ -1611,7 +1588,7 @@ public void testSearchTopAnomalyResultsWithInvalidInputs() throws IOException { assertTrue( invalidCategoryFieldsException .getMessage() - .contains("Category field invalid-field doesn't exist for detector ID " + detector.getDetectorId()) + .contains("Category field invalid-field doesn't exist for detector ID " + detector.getId()) ); // Using detector with no category fields @@ -1629,18 +1606,13 @@ public void testSearchTopAnomalyResultsWithInvalidInputs() throws IOException { Exception noCategoryFieldsException = expectThrows( IOException.class, () -> { - searchTopAnomalyResults( - detectorWithNoCategoryFields.getDetectorId(), - false, - "{\"start_time_ms\":1, \"end_time_ms\":2}", - client() - ); + searchTopAnomalyResults(detectorWithNoCategoryFields.getId(), false, "{\"start_time_ms\":1, \"end_time_ms\":2}", client()); } ); assertTrue( noCategoryFieldsException .getMessage() - .contains("No category fields found for detector ID " + detectorWithNoCategoryFields.getDetectorId()) + .contains("No category fields found for detector ID " + detectorWithNoCategoryFields.getId()) ); } @@ -1672,7 +1644,7 @@ public void testSearchTopAnomalyResultsOnNonExistentResultIndex() throws IOExcep deleteIndexWithAdminClient(ADCommonName.ANOMALY_RESULT_INDEX_ALIAS + "*"); } Response response = searchTopAnomalyResults( - detector.getDetectorId(), + detector.getId(), false, "{\"size\":3,\"category_field\":[\"keyword-field\"]," + "\"start_time_ms\":0, \"end_time_ms\":1}", client() @@ -1711,7 +1683,7 @@ public void testSearchTopAnomalyResultsOnEmptyResultIndex() throws 
IOException { } TestHelpers.createEmptyAnomalyResultIndex(adminClient()); Response response = searchTopAnomalyResults( - detector.getDetectorId(), + detector.getId(), false, "{\"size\":3,\"category_field\":[\"keyword-field\"]," + "\"start_time_ms\":0, \"end_time_ms\":1}", client() @@ -1768,11 +1740,11 @@ public void testSearchTopAnomalyResultsOnPopulatedResultIndex() throws IOExcepti } }; AnomalyResult anomalyResult1 = TestHelpers - .randomHCADAnomalyDetectResult(detector.getDetectorId(), null, entityAttrs1, 0.5, 0.8, null, 5L, 5L); + .randomHCADAnomalyDetectResult(detector.getId(), null, entityAttrs1, 0.5, 0.8, null, 5L, 5L); AnomalyResult anomalyResult2 = TestHelpers - .randomHCADAnomalyDetectResult(detector.getDetectorId(), null, entityAttrs2, 0.5, 0.5, null, 5L, 5L); + .randomHCADAnomalyDetectResult(detector.getId(), null, entityAttrs2, 0.5, 0.5, null, 5L, 5L); AnomalyResult anomalyResult3 = TestHelpers - .randomHCADAnomalyDetectResult(detector.getDetectorId(), null, entityAttrs3, 0.5, 0.2, null, 5L, 5L); + .randomHCADAnomalyDetectResult(detector.getId(), null, entityAttrs3, 0.5, 0.2, null, 5L, 5L); TestHelpers.ingestDataToIndex(adminClient(), ADCommonName.ANOMALY_RESULT_INDEX_ALIAS, TestHelpers.toHttpEntity(anomalyResult1)); TestHelpers.ingestDataToIndex(adminClient(), ADCommonName.ANOMALY_RESULT_INDEX_ALIAS, TestHelpers.toHttpEntity(anomalyResult2)); @@ -1780,7 +1752,7 @@ public void testSearchTopAnomalyResultsOnPopulatedResultIndex() throws IOExcepti // Sorting by severity Response severityResponse = searchTopAnomalyResults( - detector.getDetectorId(), + detector.getId(), false, "{\"category_field\":[\"keyword-field\"]," + "\"start_time_ms\":0, \"end_time_ms\":10, \"order\":\"severity\"}", client() @@ -1799,7 +1771,7 @@ public void testSearchTopAnomalyResultsOnPopulatedResultIndex() throws IOExcepti // Sorting by occurrence Response occurrenceResponse = searchTopAnomalyResults( - detector.getDetectorId(), + detector.getId(), false, 
"{\"category_field\":[\"keyword-field\"]," + "\"start_time_ms\":0, \"end_time_ms\":10, \"order\":\"occurrence\"}", client() @@ -1818,7 +1790,7 @@ public void testSearchTopAnomalyResultsOnPopulatedResultIndex() throws IOExcepti // Sorting using all category fields Response allFieldsResponse = searchTopAnomalyResults( - detector.getDetectorId(), + detector.getId(), false, "{\"category_field\":[\"keyword-field\", \"ip-field\"]," + "\"start_time_ms\":0, \"end_time_ms\":10, \"order\":\"severity\"}", client() @@ -1870,10 +1842,10 @@ public void testSearchTopAnomalyResultsWithCustomResultIndex() throws IOExceptio } }; AnomalyResult anomalyResult = TestHelpers - .randomHCADAnomalyDetectResult(detector.getDetectorId(), null, entityAttrs, 0.5, 0.8, null, 5L, 5L); + .randomHCADAnomalyDetectResult(detector.getId(), null, entityAttrs, 0.5, 0.8, null, 5L, 5L); TestHelpers.ingestDataToIndex(client(), customResultIndexName, TestHelpers.toHttpEntity(anomalyResult)); - Response response = searchTopAnomalyResults(detector.getDetectorId(), false, "{\"start_time_ms\":0, \"end_time_ms\":10}", client()); + Response response = searchTopAnomalyResults(detector.getId(), false, "{\"start_time_ms\":0, \"end_time_ms\":10}", client()); Map responseMap = entityAsMap(response); @SuppressWarnings("unchecked") List> buckets = (ArrayList>) XContentMapValues.extractValue("buckets", responseMap); diff --git a/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java b/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java index a8800c1d3..fac7f9dc4 100644 --- a/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java +++ b/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java @@ -11,11 +11,11 @@ package org.opensearch.ad.rest; -import static org.opensearch.ad.TestHelpers.AD_BASE_STATS_URI; -import static org.opensearch.ad.TestHelpers.HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS; import static 
org.opensearch.ad.settings.AnomalyDetectorSettings.BATCH_TASK_PIECE_INTERVAL_SECONDS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS; +import static org.opensearch.timeseries.TestHelpers.AD_BASE_STATS_URI; +import static org.opensearch.timeseries.TestHelpers.HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS; import static org.opensearch.timeseries.stats.StatNames.AD_TOTAL_BATCH_TASK_EXECUTION_COUNT; import static org.opensearch.timeseries.stats.StatNames.MULTI_ENTITY_DETECTOR_COUNT; import static org.opensearch.timeseries.stats.StatNames.SINGLE_ENTITY_DETECTOR_COUNT; @@ -31,7 +31,6 @@ import org.junit.Before; import org.junit.Ignore; import org.opensearch.ad.HistoricalAnalysisRestTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskProfile; @@ -42,6 +41,7 @@ import org.opensearch.client.ResponseException; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.rest.RestStatus; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -110,7 +110,7 @@ private List startHistoricalAnalysis(int categoryFieldSize) throws Excep @SuppressWarnings("unchecked") private List startHistoricalAnalysis(int categoryFieldSize, String resultIndex) throws Exception { AnomalyDetector detector = createAnomalyDetector(categoryFieldSize, resultIndex); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // start historical detector String taskId = startHistoricalAnalysis(detectorId); @@ -160,7 +160,7 @@ private List startHistoricalAnalysis(int categoryFieldSize, String resul public void testStopHistoricalAnalysis() throws Exception { // create historical detector 
AnomalyDetector detector = createAnomalyDetector(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // start historical detector String taskId = startHistoricalAnalysis(detectorId); @@ -194,7 +194,7 @@ public void testStopHistoricalAnalysis() throws Exception { public void testUpdateHistoricalAnalysis() throws IOException, IllegalAccessException { // create historical detector AnomalyDetector detector = createAnomalyDetector(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // update historical detector AnomalyDetector newDetector = randomAnomalyDetector(detector); @@ -208,11 +208,11 @@ public void testUpdateHistoricalAnalysis() throws IOException, IllegalAccessExce null ); Map responseBody = entityAsMap(updateResponse); - assertEquals(detector.getDetectorId(), responseBody.get("_id")); + assertEquals(detector.getId(), responseBody.get("_id")); assertEquals((detector.getVersion().intValue() + 1), (int) responseBody.get("_version")); // get historical detector - AnomalyDetector updatedDetector = getAnomalyDetector(detector.getDetectorId(), client()); + AnomalyDetector updatedDetector = getAnomalyDetector(detector.getId(), client()); assertNotEquals(updatedDetector.getLastUpdateTime(), detector.getLastUpdateTime()); assertEquals(newDetector.getName(), updatedDetector.getName()); assertEquals(newDetector.getDescription(), updatedDetector.getDescription()); @@ -221,7 +221,7 @@ public void testUpdateHistoricalAnalysis() throws IOException, IllegalAccessExce public void testUpdateRunningHistoricalAnalysis() throws Exception { // create historical detector AnomalyDetector detector = createAnomalyDetector(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // start historical detector startHistoricalAnalysis(detectorId); @@ -250,7 +250,7 @@ public void testUpdateRunningHistoricalAnalysis() throws Exception { public void testDeleteHistoricalAnalysis() 
throws IOException, IllegalAccessException { // create historical detector AnomalyDetector detector = createAnomalyDetector(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // delete detector Response response = TestHelpers @@ -263,7 +263,7 @@ public void testDeleteHistoricalAnalysis() throws IOException, IllegalAccessExce public void testDeleteRunningHistoricalDetector() throws Exception { // create historical detector AnomalyDetector detector = createAnomalyDetector(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // start historical detector startHistoricalAnalysis(detectorId); @@ -283,7 +283,7 @@ public void testDeleteRunningHistoricalDetector() throws Exception { public void testSearchTasks() throws IOException, InterruptedException, IllegalAccessException, ParseException { // create historical detector AnomalyDetector detector = createAnomalyDetector(); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); // start historical detector String taskId = startHistoricalAnalysis(detectorId); @@ -295,12 +295,12 @@ public void testSearchTasks() throws IOException, InterruptedException, IllegalA .makeRequest(client(), "POST", TestHelpers.AD_BASE_DETECTORS_URI + "/tasks/_search", ImmutableMap.of(), query, null); String searchResult = EntityUtils.toString(response.getEntity()); assertTrue(searchResult.contains(taskId)); - assertTrue(searchResult.contains(detector.getDetectorId())); + assertTrue(searchResult.contains(detector.getId())); } private AnomalyDetector randomAnomalyDetector(AnomalyDetector detector) { return new AnomalyDetector( - detector.getDetectorId(), + detector.getId(), null, randomAlphaOfLength(5), randomAlphaOfLength(5), @@ -308,15 +308,16 @@ private AnomalyDetector randomAnomalyDetector(AnomalyDetector detector) { detector.getIndices(), detector.getFeatureAttributes(), detector.getFilterQuery(), - detector.getDetectionInterval(), + 
detector.getInterval(), detector.getWindowDelay(), detector.getShingleSize(), detector.getUiMetadata(), detector.getSchemaVersion(), detector.getLastUpdateTime(), - detector.getCategoryField(), + detector.getCategoryFields(), detector.getUser(), - detector.getResultIndex() + detector.getCustomResultIndex(), + detector.getImputationOption() ); } diff --git a/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java b/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java index ecb34cbad..e8ef7149c 100644 --- a/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java +++ b/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java @@ -28,7 +28,6 @@ import org.junit.Assert; import org.junit.Before; import org.opensearch.ad.AnomalyDetectorRestTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorExecutionInput; @@ -37,6 +36,7 @@ import org.opensearch.commons.authuser.User; import org.opensearch.commons.rest.SecureRestClientBuilder; import org.opensearch.rest.RestStatus; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; import com.google.common.collect.ImmutableList; @@ -161,7 +161,7 @@ public void deleteUserSetup() throws IOException { public void testCreateAnomalyDetectorWithWriteAccess() throws IOException { // User Alice has AD full access, should be able to create a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); - Assert.assertNotNull("User alice could not create detector", aliceDetector.getDetectorId()); + Assert.assertNotNull("User alice could not create detector", aliceDetector.getId()); } public void testCreateAnomalyDetectorWithReadAccess() { @@ -173,24 +173,17 @@ public void testCreateAnomalyDetectorWithReadAccess() { public void testStartDetectorWithReadAccess() throws IOException { // User Bob has AD read access, should not be 
able to modify a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); - Assert.assertNotNull(aliceDetector.getDetectorId()); - Exception exception = expectThrows( - IOException.class, - () -> { startAnomalyDetector(aliceDetector.getDetectorId(), null, bobClient); } - ); + Assert.assertNotNull(aliceDetector.getId()); + Exception exception = expectThrows(IOException.class, () -> { startAnomalyDetector(aliceDetector.getId(), null, bobClient); }); Assert.assertTrue(exception.getMessage().contains("no permissions for [cluster:admin/opendistro/ad/detector/jobmanagement]")); } public void testStartDetectorForWriteUser() throws IOException { // User Alice has AD full access, should be able to modify a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); - Assert.assertNotNull(aliceDetector.getDetectorId()); + Assert.assertNotNull(aliceDetector.getId()); Instant now = Instant.now(); - Response response = startAnomalyDetector( - aliceDetector.getDetectorId(), - new DateRange(now.minus(10, ChronoUnit.DAYS), now), - aliceClient - ); + Response response = startAnomalyDetector(aliceDetector.getId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), aliceClient); MatcherAssert.assertThat(response.getStatusLine().toString(), CoreMatchers.containsString("200 OK")); } @@ -198,8 +191,8 @@ public void testFilterByDisabled() throws IOException { // User Alice has AD full access, should be able to create a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); // User Cat has AD full access, should be able to get a detector - AnomalyDetector detector = getAnomalyDetector(aliceDetector.getDetectorId(), catClient); - Assert.assertEquals(aliceDetector.getDetectorId(), detector.getDetectorId()); + AnomalyDetector detector = getAnomalyDetector(aliceDetector.getId(), catClient); + Assert.assertEquals(aliceDetector.getId(), detector.getId()); } public void 
testGetApiFilterByEnabled() throws IOException { @@ -208,11 +201,8 @@ public void testGetApiFilterByEnabled() throws IOException { enableFilterBy(); // User Cat has AD full access, but is part of different backend role so Cat should not be able to access // Alice detector - Exception exception = expectThrows(IOException.class, () -> { getAnomalyDetector(aliceDetector.getDetectorId(), catClient); }); - Assert - .assertTrue( - exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) - ); + Exception exception = expectThrows(IOException.class, () -> { getAnomalyDetector(aliceDetector.getId(), catClient); }); + Assert.assertTrue(exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getId())); } private void confirmingClientIsAdmin() throws IOException { @@ -235,7 +225,7 @@ public void testGetApiFilterByEnabledForAdmin() throws IOException { AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); enableFilterBy(); confirmingClientIsAdmin(); - AnomalyDetector detector = getAnomalyDetector(aliceDetector.getDetectorId(), client()); + AnomalyDetector detector = getAnomalyDetector(aliceDetector.getId(), client()); Assert .assertArrayEquals( "User backend role of detector doesn't change", @@ -250,7 +240,7 @@ public void testUpdateApiFilterByEnabledForAdmin() throws IOException { enableFilterBy(); AnomalyDetector newDetector = new AnomalyDetector( - aliceDetector.getDetectorId(), + aliceDetector.getId(), aliceDetector.getVersion(), aliceDetector.getName(), randomAlphaOfLength(10), @@ -258,26 +248,27 @@ public void testUpdateApiFilterByEnabledForAdmin() throws IOException { aliceDetector.getIndices(), aliceDetector.getFeatureAttributes(), aliceDetector.getFilterQuery(), - aliceDetector.getDetectionInterval(), + aliceDetector.getInterval(), aliceDetector.getWindowDelay(), aliceDetector.getShingleSize(), aliceDetector.getUiMetadata(), 
aliceDetector.getSchemaVersion(), Instant.now(), - aliceDetector.getCategoryField(), + aliceDetector.getCategoryFields(), new User( randomAlphaOfLength(5), ImmutableList.of("odfe", randomAlphaOfLength(5)), ImmutableList.of(randomAlphaOfLength(5)), ImmutableList.of(randomAlphaOfLength(5)) ), - null + null, + aliceDetector.getImputationOption() ); // User client has admin all access, and has "opensearch" backend role so client should be able to update detector // But the detector's backend role should not be replaced as client's backend roles (all_access). - Response response = updateAnomalyDetector(aliceDetector.getDetectorId(), newDetector, client()); + Response response = updateAnomalyDetector(aliceDetector.getId(), newDetector, client()); Assert.assertEquals(response.getStatusLine().getStatusCode(), 200); - AnomalyDetector anomalyDetector = getAnomalyDetector(aliceDetector.getDetectorId(), aliceClient); + AnomalyDetector anomalyDetector = getAnomalyDetector(aliceDetector.getId(), aliceClient); Assert .assertArrayEquals( "odfe is still the backendrole, not opensearch", @@ -296,7 +287,7 @@ public void testUpdateApiFilterByEnabled() throws IOException { aliceDetector.getUser().getBackendRoles().toArray(new String[0]) ); AnomalyDetector newDetector = new AnomalyDetector( - aliceDetector.getDetectorId(), + aliceDetector.getId(), aliceDetector.getVersion(), aliceDetector.getName(), randomAlphaOfLength(10), @@ -304,28 +295,29 @@ public void testUpdateApiFilterByEnabled() throws IOException { aliceDetector.getIndices(), aliceDetector.getFeatureAttributes(), aliceDetector.getFilterQuery(), - aliceDetector.getDetectionInterval(), + aliceDetector.getInterval(), aliceDetector.getWindowDelay(), aliceDetector.getShingleSize(), aliceDetector.getUiMetadata(), aliceDetector.getSchemaVersion(), Instant.now(), - aliceDetector.getCategoryField(), + aliceDetector.getCategoryFields(), new User( randomAlphaOfLength(5), ImmutableList.of("odfe", randomAlphaOfLength(5)), 
ImmutableList.of(randomAlphaOfLength(5)), ImmutableList.of(randomAlphaOfLength(5)) ), - null + null, + aliceDetector.getImputationOption() ); enableFilterBy(); // User Fish has AD full access, and has "odfe" backend role which is one of Alice's backend role, so // Fish should be able to update detectors created by Alice. But the detector's backend role should // not be replaced as Fish's backend roles. - Response response = updateAnomalyDetector(aliceDetector.getDetectorId(), newDetector, fishClient); + Response response = updateAnomalyDetector(aliceDetector.getId(), newDetector, fishClient); Assert.assertEquals(response.getStatusLine().getStatusCode(), 200); - AnomalyDetector anomalyDetector = getAnomalyDetector(aliceDetector.getDetectorId(), aliceClient); + AnomalyDetector anomalyDetector = getAnomalyDetector(aliceDetector.getId(), aliceClient); Assert .assertArrayEquals( "Wrong user roles", @@ -343,12 +335,9 @@ public void testStartApiFilterByEnabled() throws IOException { Instant now = Instant.now(); Exception exception = expectThrows( IOException.class, - () -> { startAnomalyDetector(aliceDetector.getDetectorId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), catClient); } + () -> { startAnomalyDetector(aliceDetector.getId(), new DateRange(now.minus(10, ChronoUnit.DAYS), now), catClient); } ); - Assert - .assertTrue( - exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) - ); + Assert.assertTrue(exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getId())); } public void testStopApiFilterByEnabled() throws IOException { @@ -357,14 +346,8 @@ public void testStopApiFilterByEnabled() throws IOException { enableFilterBy(); // User Cat has AD full access, but is part of different backend role so Cat should not be able to access // Alice detector - Exception exception = expectThrows( - IOException.class, - () -> { 
stopAnomalyDetector(aliceDetector.getDetectorId(), catClient, true); } - ); - Assert - .assertTrue( - exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) - ); + Exception exception = expectThrows(IOException.class, () -> { stopAnomalyDetector(aliceDetector.getId(), catClient, true); }); + Assert.assertTrue(exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getId())); } public void testDeleteApiFilterByEnabled() throws IOException { @@ -373,11 +356,8 @@ public void testDeleteApiFilterByEnabled() throws IOException { enableFilterBy(); // User Cat has AD full access, but is part of different backend role so Cat should not be able to access // Alice detector - Exception exception = expectThrows(IOException.class, () -> { deleteAnomalyDetector(aliceDetector.getDetectorId(), catClient); }); - Assert - .assertTrue( - exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) - ); + Exception exception = expectThrows(IOException.class, () -> { deleteAnomalyDetector(aliceDetector.getId(), catClient); }); + Assert.assertTrue(exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getId())); } public void testCreateAnomalyDetectorWithNoBackendRole() throws IOException { @@ -416,19 +396,19 @@ public void testCreateAnomalyDetectorWithCustomResultIndex() throws IOException resultIndex = ADCommonName.CUSTOM_RESULT_INDEX_PREFIX + "test2"; TestHelpers.createIndexWithTimeField(client(), anomalyDetector.getIndices().get(0), anomalyDetector.getTimeField()); AnomalyDetector detectorOfCat = createAnomalyDetector(cloneDetector(anomalyDetector, resultIndex), true, catClient); - assertEquals(resultIndex, detectorOfCat.getResultIndex()); + assertEquals(resultIndex, detectorOfCat.getCustomResultIndex()); } public void testPreviewAnomalyDetectorWithWriteAccess() throws 
IOException { // User Alice has AD full access, should be able to create/preview a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( - aliceDetector.getDetectorId(), + aliceDetector.getId(), Instant.now().minusSeconds(60 * 10), Instant.now(), null ); - Response response = previewAnomalyDetector(aliceDetector.getDetectorId(), aliceClient, input); + Response response = previewAnomalyDetector(aliceDetector.getId(), aliceClient, input); Assert.assertEquals(RestStatus.OK, TestHelpers.restStatus(response)); } @@ -442,10 +422,7 @@ public void testPreviewAnomalyDetectorWithReadAccess() throws IOException { null ); // User bob has AD read access, should not be able to preview a detector - Exception exception = expectThrows( - IOException.class, - () -> { previewAnomalyDetector(aliceDetector.getDetectorId(), bobClient, input); } - ); + Exception exception = expectThrows(IOException.class, () -> { previewAnomalyDetector(aliceDetector.getId(), bobClient, input); }); Assert.assertTrue(exception.getMessage().contains("no permissions for [cluster:admin/opendistro/ad/detector/preview]")); } @@ -453,7 +430,7 @@ public void testPreviewAnomalyDetectorWithFilterEnabled() throws IOException { // User Alice has AD full access, should be able to create a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( - aliceDetector.getDetectorId(), + aliceDetector.getId(), Instant.now().minusSeconds(60 * 10), Instant.now(), null @@ -461,31 +438,22 @@ public void testPreviewAnomalyDetectorWithFilterEnabled() throws IOException { enableFilterBy(); // User Cat has AD full access, but is part of different backend role so Cat should not be able to access // Alice detector - Exception exception = expectThrows( - IOException.class, - () -> { 
previewAnomalyDetector(aliceDetector.getDetectorId(), catClient, input); } - ); - Assert - .assertTrue( - exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getDetectorId()) - ); + Exception exception = expectThrows(IOException.class, () -> { previewAnomalyDetector(aliceDetector.getId(), catClient, input); }); + Assert.assertTrue(exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getId())); } public void testPreviewAnomalyDetectorWithNoReadPermissionOfIndex() throws IOException { // User Alice has AD full access, should be able to create a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); AnomalyDetectorExecutionInput input = new AnomalyDetectorExecutionInput( - aliceDetector.getDetectorId(), + aliceDetector.getId(), Instant.now().minusSeconds(60 * 10), Instant.now(), aliceDetector ); enableFilterBy(); // User elk has no read permission of index - Exception exception = expectThrows( - Exception.class, - () -> { previewAnomalyDetector(aliceDetector.getDetectorId(), elkClient, input); } - ); + Exception exception = expectThrows(Exception.class, () -> { previewAnomalyDetector(aliceDetector.getId(), elkClient, input); }); Assert .assertTrue( "actual msg: " + exception.getMessage(), diff --git a/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java b/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java index 71b63b2a9..953e6cc20 100644 --- a/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java +++ b/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java @@ -36,7 +36,6 @@ import org.opensearch.action.update.UpdateResponse; import org.opensearch.ad.ExecuteADResultResponseRecorder; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import 
org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.AnomalyDetectionIndices; @@ -58,6 +57,7 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.InternalFailure; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.model.Feature; diff --git a/src/test/java/org/opensearch/ad/task/ADTaskCacheManagerTests.java b/src/test/java/org/opensearch/ad/task/ADTaskCacheManagerTests.java index 2fde54a42..ba9698d6a 100644 --- a/src/test/java/org/opensearch/ad/task/ADTaskCacheManagerTests.java +++ b/src/test/java/org/opensearch/ad/task/ADTaskCacheManagerTests.java @@ -34,7 +34,6 @@ import org.junit.After; import org.junit.Before; import org.opensearch.ad.MemoryTracker; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.ADTaskType; @@ -44,6 +43,7 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.DuplicateTaskException; import org.opensearch.timeseries.common.exception.LimitExceededException; @@ -94,7 +94,7 @@ public void testPutTask() throws IOException { adTaskCacheManager.add(adTask); assertEquals(1, adTaskCacheManager.size()); assertTrue(adTaskCacheManager.contains(adTask.getTaskId())); - assertTrue(adTaskCacheManager.containsTaskOfDetector(adTask.getDetectorId())); + assertTrue(adTaskCacheManager.containsTaskOfDetector(adTask.getId())); assertNotNull(adTaskCacheManager.getTRcfModel(adTask.getTaskId())); 
assertNotNull(adTaskCacheManager.getShingle(adTask.getTaskId())); assertFalse(adTaskCacheManager.isThresholdModelTrained(adTask.getTaskId())); @@ -116,7 +116,7 @@ public void testPutDuplicateTask() throws IOException { ADTaskState.INIT, adTask1.getExecutionEndTime(), adTask1.getStoppedBy(), - adTask1.getDetectorId(), + adTask1.getId(), adTask1.getDetector(), ADTaskType.HISTORICAL_SINGLE_ENTITY ); @@ -140,7 +140,7 @@ public void testPutMultipleEntityTasks() throws IOException { ADTaskState.CREATED, Instant.now(), null, - detector.getDetectorId(), + detector.getId(), detector, ADTaskType.HISTORICAL_HC_ENTITY ); @@ -150,13 +150,13 @@ public void testPutMultipleEntityTasks() throws IOException { ADTaskState.CREATED, Instant.now(), null, - detector.getDetectorId(), + detector.getId(), detector, ADTaskType.HISTORICAL_HC_ENTITY ); adTaskCacheManager.add(adTask1); adTaskCacheManager.add(adTask2); - List tasks = adTaskCacheManager.getTasksOfDetector(detector.getDetectorId()); + List tasks = adTaskCacheManager.getTasksOfDetector(detector.getId()); assertEquals(2, tasks.size()); } @@ -223,8 +223,8 @@ public void testCancelByDetectorId() throws IOException { when(memoryTracker.canAllocateReserved(anyLong())).thenReturn(true); ADTask adTask = TestHelpers.randomAdTask(); adTaskCacheManager.add(adTask); - String detectorId = adTask.getDetectorId(); - String detectorTaskId = adTask.getDetectorId(); + String detectorId = adTask.getId(); + String detectorTaskId = adTask.getId(); String reason = randomAlphaOfLength(10); String userName = randomAlphaOfLength(5); ADTaskCancellationState state = adTaskCacheManager.cancelByDetectorId(detectorId, detectorTaskId, reason, userName); @@ -430,7 +430,7 @@ private List addHCDetectorCache() throws IOException { true, ImmutableList.of(randomAlphaOfLength(5)) ); - String detectorId = detector.getDetectorId(); + String detectorId = detector.getId(); ADTask adDetectorTask = TestHelpers .randomAdTask( randomAlphaOfLength(5), diff --git 
a/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java b/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java index 50843a56e..f3f32ac46 100644 --- a/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java +++ b/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java @@ -28,14 +28,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.ad.TestHelpers.randomAdTask; -import static org.opensearch.ad.TestHelpers.randomAnomalyDetector; -import static org.opensearch.ad.TestHelpers.randomDetectionDateRange; -import static org.opensearch.ad.TestHelpers.randomDetector; -import static org.opensearch.ad.TestHelpers.randomFeature; -import static org.opensearch.ad.TestHelpers.randomIntervalSchedule; -import static org.opensearch.ad.TestHelpers.randomIntervalTimeConfiguration; -import static org.opensearch.ad.TestHelpers.randomUser; import static org.opensearch.ad.constant.ADCommonName.ANOMALY_RESULT_INDEX_ALIAS; import static org.opensearch.ad.constant.ADCommonName.DETECTION_STATE_INDEX; import static org.opensearch.ad.model.Entity.createSingleAttributeEntity; @@ -46,6 +38,14 @@ import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.timeseries.TestHelpers.randomAdTask; +import static org.opensearch.timeseries.TestHelpers.randomAnomalyDetector; +import static org.opensearch.timeseries.TestHelpers.randomDetectionDateRange; +import static org.opensearch.timeseries.TestHelpers.randomDetector; +import static org.opensearch.timeseries.TestHelpers.randomFeature; +import static org.opensearch.timeseries.TestHelpers.randomIntervalSchedule; +import static 
org.opensearch.timeseries.TestHelpers.randomIntervalTimeConfiguration; +import static org.opensearch.timeseries.TestHelpers.randomUser; import static org.opensearch.timeseries.constant.CommonMessages.CREATE_INDEX_NOT_ACKNOWLEDGED; import java.io.IOException; @@ -81,7 +81,6 @@ import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.action.update.UpdateResponse; import org.opensearch.ad.ADUnitTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.mock.model.MockSimpleLog; @@ -127,6 +126,7 @@ import org.opensearch.search.aggregations.InternalAggregations; import org.opensearch.search.internal.InternalSearchResponse; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.DuplicateTaskException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; @@ -292,7 +292,7 @@ private void setupGetDetector(AnomalyDetector detector) { new GetResponse( new GetResult( CommonName.CONFIG_INDEX, - detector.getDetectorId(), + detector.getId(), UNASSIGNED_SEQ_NO, 0, -1, @@ -380,7 +380,7 @@ public void testStartDetectorWithNoEnabledFeature() throws IOException { adTaskManager .startDetector( - detector.getDetectorId(), + detector.getId(), detectionDateRange, indexAnomalyDetectorJobActionHandler, randomUser(), @@ -399,7 +399,7 @@ public void testStartDetectorForHistoricalAnalysis() throws IOException { adTaskManager .startDetector( - detector.getDetectorId(), + detector.getId(), detectionDateRange, indexAnomalyDetectorJobActionHandler, randomUser(), diff --git a/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultRequestTests.java b/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultRequestTests.java index 0ee1efd73..dd200f8f4 100644 --- 
a/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultRequestTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultRequestTests.java @@ -14,9 +14,9 @@ import java.io.IOException; import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.ADTask; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; public class ADBatchAnomalyResultRequestTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java index ed727d81a..1654ede0c 100644 --- a/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java @@ -11,10 +11,10 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS; import static org.opensearch.ad.settings.ADEnabledSetting.AD_ENABLED; import static org.opensearch.ad.settings.AnomalyDetectorSettings.BATCH_TASK_PIECE_INTERVAL_SECONDS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE; +import static org.opensearch.timeseries.TestHelpers.HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS; import java.io.IOException; import java.time.Instant; @@ -25,7 +25,6 @@ import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.get.GetResponse; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; @@ -33,6 +32,7 @@ import org.opensearch.common.io.stream.NotSerializableExceptionWrapper; import org.opensearch.common.settings.Settings; import 
org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.model.DateRange; diff --git a/src/test/java/org/opensearch/ad/transport/ADCancelTaskNodeRequestTests.java b/src/test/java/org/opensearch/ad/transport/ADCancelTaskNodeRequestTests.java index 5ca96bdb2..546628a86 100644 --- a/src/test/java/org/opensearch/ad/transport/ADCancelTaskNodeRequestTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADCancelTaskNodeRequestTests.java @@ -28,7 +28,7 @@ public void testParseOldADCancelTaskNodeRequestTest() throws IOException { oldRequest.writeTo(output); StreamInput input = output.bytes().streamInput(); ADCancelTaskNodeRequest parsedRequest = new ADCancelTaskNodeRequest(input); - assertEquals(detectorId, parsedRequest.getDetectorId()); + assertEquals(detectorId, parsedRequest.getId()); assertEquals(userName, parsedRequest.getUserName()); assertNull(parsedRequest.getDetectorTaskId()); assertNull(parsedRequest.getReason()); diff --git a/src/test/java/org/opensearch/ad/transport/ADCancelTaskTests.java b/src/test/java/org/opensearch/ad/transport/ADCancelTaskTests.java index 91e36c934..85d839a1a 100644 --- a/src/test/java/org/opensearch/ad/transport/ADCancelTaskTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADCancelTaskTests.java @@ -11,7 +11,7 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.randomDiscoveryNode; +import static org.opensearch.timeseries.TestHelpers.randomDiscoveryNode; import java.io.IOException; import java.util.List; @@ -41,7 +41,7 @@ public void testADCancelTaskRequest() throws IOException { request.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); ADCancelTaskRequest parsedRequest = new ADCancelTaskRequest(input); - assertEquals(request.getDetectorId(), 
parsedRequest.getDetectorId()); + assertEquals(request.getId(), parsedRequest.getId()); assertEquals(request.getUserName(), parsedRequest.getUserName()); } diff --git a/src/test/java/org/opensearch/ad/transport/ADResultBulkTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ADResultBulkTransportActionTests.java index 14451d5b1..83e83b25c 100644 --- a/src/test/java/org/opensearch/ad/transport/ADResultBulkTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADResultBulkTransportActionTests.java @@ -32,8 +32,6 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; @@ -41,9 +39,11 @@ import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexingPressure; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; -public class ADResultBulkTransportActionTests extends AbstractADTest { +public class ADResultBulkTransportActionTests extends AbstractTimeSeriesTest { private ADResultBulkTransportAction resultBulk; private TransportService transportService; private ClusterService clusterService; diff --git a/src/test/java/org/opensearch/ad/transport/ADTaskProfileResponseTests.java b/src/test/java/org/opensearch/ad/transport/ADTaskProfileResponseTests.java index 00fbc319d..1807a6497 100644 --- a/src/test/java/org/opensearch/ad/transport/ADTaskProfileResponseTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADTaskProfileResponseTests.java @@ -11,7 +11,7 @@ package org.opensearch.ad.transport; -import static 
org.opensearch.ad.TestHelpers.randomDiscoveryNode; +import static org.opensearch.timeseries.TestHelpers.randomDiscoveryNode; import java.io.IOException; import java.util.List; diff --git a/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java b/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java index c586fe1a0..49e4172ae 100644 --- a/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADTaskProfileTests.java @@ -11,7 +11,7 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.randomDiscoveryNode; +import static org.opensearch.timeseries.TestHelpers.randomDiscoveryNode; import java.io.IOException; import java.time.Instant; @@ -22,7 +22,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.model.ADTaskProfile; import org.opensearch.cluster.ClusterName; @@ -35,6 +34,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; @@ -56,7 +56,7 @@ public void testADTaskProfileRequest() throws IOException { request.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); ADTaskProfileRequest parsedRequest = new ADTaskProfileRequest(input); - assertEquals(request.getDetectorId(), parsedRequest.getDetectorId()); + assertEquals(request.getId(), parsedRequest.getId()); } public void testInvalidADTaskProfileRequest() { diff --git a/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java 
b/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java index c8c3bdfdd..c9808886a 100644 --- a/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java @@ -11,7 +11,6 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS; import static org.opensearch.ad.constant.ADCommonMessages.DETECTOR_IS_RUNNING; import static org.opensearch.ad.settings.AnomalyDetectorSettings.BATCH_TASK_PIECE_INTERVAL_SECONDS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE; @@ -21,6 +20,7 @@ import static org.opensearch.ad.util.RestHandlerUtils.STOP_JOB; import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +import static org.opensearch.timeseries.TestHelpers.HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS; import static org.opensearch.timeseries.constant.CommonMessages.FAIL_TO_FIND_CONFIG_MSG; import java.io.IOException; @@ -41,7 +41,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.get.GetResponse; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.mock.model.MockSimpleLog; import org.opensearch.ad.mock.transport.MockAnomalyDetectorJobAction; @@ -56,6 +55,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexNotFoundException; import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.stats.StatNames; @@ -178,7 +178,7 @@ public void 
testStartHistoricalAnalysisForSingleCategoryHCWithUser() throws IOEx ADTask adTask = getADTask(response.getId()); assertEquals(ADTaskType.HISTORICAL_HC_DETECTOR.toString(), adTask.getTaskType()); assertTrue(HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS.contains(adTask.getState())); - assertEquals(categoryField, adTask.getDetector().getCategoryField().get(0)); + assertEquals(categoryField, adTask.getDetector().getCategoryFields().get(0)); if (ADTaskState.FINISHED.name().equals(adTask.getState())) { List adTasks = searchADTasks(detectorId, true, 100); @@ -233,8 +233,8 @@ public void testStartHistoricalAnalysisForMultiCategoryHCWithUser() throws IOExc assertEquals(ADTaskType.HISTORICAL_HC_DETECTOR.toString(), adTask.getTaskType()); // Task may fail if memory circuit breaker triggered assertTrue(HISTORICAL_ANALYSIS_FINISHED_FAILED_STATS.contains(adTask.getState())); - assertEquals(categoryField, adTask.getDetector().getCategoryField().get(0)); - assertEquals(ipField, adTask.getDetector().getCategoryField().get(1)); + assertEquals(categoryField, adTask.getDetector().getCategoryFields().get(0)); + assertEquals(ipField, adTask.getDetector().getCategoryFields().get(1)); if (ADTaskState.FINISHED.name().equals(adTask.getState())) { List adTasks = searchADTasks(detectorId, taskId, true, 100); @@ -447,7 +447,7 @@ public void testStopHistoricalDetector() throws IOException, InterruptedExceptio if (taskRunning) { // It's possible that the task not started on worker node yet. Recancel it to make sure // task cancelled. 
- AnomalyDetectorJobRequest request = stopDetectorJobRequest(adTask.getDetectorId(), true); + AnomalyDetectorJobRequest request = stopDetectorJobRequest(adTask.getId(), true); client().execute(AnomalyDetectorJobAction.INSTANCE, request).actionGet(10000); } return !taskRunning; @@ -462,7 +462,7 @@ public void testStopHistoricalDetector() throws IOException, InterruptedExceptio public void testProfileHistoricalDetector() throws IOException, InterruptedException { ADTask adTask = startHistoricalAnalysis(startTime, endTime); - GetAnomalyDetectorRequest request = taskProfileRequest(adTask.getDetectorId()); + GetAnomalyDetectorRequest request = taskProfileRequest(adTask.getId()); GetAnomalyDetectorResponse response = client().execute(GetAnomalyDetectorAction.INSTANCE, request).actionGet(10000); assertTrue(response.getDetectorProfile().getAdTaskProfile() != null); @@ -479,7 +479,7 @@ public void testProfileHistoricalDetector() throws IOException, InterruptedExcep assertNull(response.getDetectorProfile().getAdTaskProfile().getNodeId()); ADTask profileAdTask = response.getDetectorProfile().getAdTaskProfile().getAdTask(); assertEquals(finishedTask.getTaskId(), profileAdTask.getTaskId()); - assertEquals(finishedTask.getDetectorId(), profileAdTask.getDetectorId()); + assertEquals(finishedTask.getId(), profileAdTask.getId()); assertEquals(finishedTask.getDetector(), profileAdTask.getDetector()); assertEquals(finishedTask.getState(), profileAdTask.getState()); } @@ -488,8 +488,8 @@ public void testProfileWithMultipleRunningTask() throws IOException { ADTask adTask1 = startHistoricalAnalysis(startTime, endTime); ADTask adTask2 = startHistoricalAnalysis(startTime, endTime); - GetAnomalyDetectorRequest request1 = taskProfileRequest(adTask1.getDetectorId()); - GetAnomalyDetectorRequest request2 = taskProfileRequest(adTask2.getDetectorId()); + GetAnomalyDetectorRequest request1 = taskProfileRequest(adTask1.getId()); + GetAnomalyDetectorRequest request2 = 
taskProfileRequest(adTask2.getId()); GetAnomalyDetectorResponse response1 = client().execute(GetAnomalyDetectorAction.INSTANCE, request1).actionGet(10000); GetAnomalyDetectorResponse response2 = client().execute(GetAnomalyDetectorAction.INSTANCE, request2).actionGet(10000); ADTaskProfile taskProfile1 = response1.getDetectorProfile().getAdTaskProfile(); diff --git a/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java b/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java index 20477ba2d..4185fe6f7 100644 --- a/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java +++ b/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java @@ -33,8 +33,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.ad.TestHelpers.createIndexBlockedState; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.timeseries.TestHelpers.createIndexBlockedState; import java.io.IOException; import java.time.Instant; @@ -65,9 +65,7 @@ import org.opensearch.action.index.IndexResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.common.exception.JsonPathNotFoundException; @@ -109,6 +107,8 @@ import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.shard.ShardId; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.InternalFailure; import 
org.opensearch.timeseries.common.exception.LimitExceededException; @@ -132,7 +132,7 @@ import com.google.gson.JsonElement; -public class AnomalyResultTests extends AbstractADTest { +public class AnomalyResultTests extends AbstractTimeSeriesTest { private Settings settings; private TransportService transportService; private ClusterService clusterService; @@ -191,14 +191,14 @@ public void setUp() throws Exception { userIndex.add("test*"); when(detector.getIndices()).thenReturn(userIndex); adID = "123"; - when(detector.getDetectorId()).thenReturn(adID); - when(detector.getCategoryField()).thenReturn(null); + when(detector.getId()).thenReturn(adID); + when(detector.getCategoryFields()).thenReturn(null); doAnswer(invocation -> { ActionListener> listener = invocation.getArgument(1); listener.onResponse(Optional.of(detector)); return null; }).when(stateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); - when(detector.getDetectorIntervalInMinutes()).thenReturn(1L); + when(detector.getIntervalInMinutes()).thenReturn(1L); hashRing = mock(HashRing.class); Optional localNode = Optional.of(clusterService.state().nodes().getLocalNode()); @@ -307,10 +307,7 @@ public void setUp() throws Exception { DetectorInternalState.Builder result = new DetectorInternalState.Builder().lastUpdateTime(Instant.now()); - listener - .onResponse( - TestHelpers.createGetResponse(result.build(), detector.getDetectorId(), ADCommonName.DETECTION_STATE_INDEX) - ); + listener.onResponse(TestHelpers.createGetResponse(result.build(), detector.getId(), ADCommonName.DETECTION_STATE_INDEX)); } diff --git a/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java index fa98b0df7..562b7de69 100644 --- a/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java @@ -11,7 +11,7 
@@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.randomQuery; +import static org.opensearch.timeseries.TestHelpers.randomQuery; import java.io.IOException; import java.time.Instant; @@ -24,12 +24,12 @@ import org.junit.Before; import org.opensearch.action.get.GetResponse; import org.opensearch.ad.ADIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.common.io.stream.NotSerializableExceptionWrapper; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.test.rest.OpenSearchRestTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.IntervalTimeConfiguration; @@ -219,7 +219,8 @@ private AnomalyDetector randomDetector(List indices, List featu Instant.now(), null, null, - null + null, + TestHelpers.randomImputationOption() ); } @@ -241,7 +242,8 @@ private AnomalyDetector randomHCDetector(List indices, List fea Instant.now(), ImmutableList.of(categoryField), null, - null + null, + TestHelpers.randomImputationOption() ); } diff --git a/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java index 174176f0c..bcb49d81b 100644 --- a/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java @@ -23,7 +23,6 @@ import org.junit.Before; import org.opensearch.Version; import org.opensearch.action.support.ActionFilters; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -42,13 +41,14 @@ import org.opensearch.core.xcontent.ToXContent; 
import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.transport.TransportService; import test.org.opensearch.ad.util.JsonDeserializer; import com.google.gson.JsonElement; -public class CronTransportActionTests extends AbstractADTest { +public class CronTransportActionTests extends AbstractTimeSeriesTest { private CronTransportAction action; private String localNodeID; diff --git a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java index 694d7f58a..b4d8a6b76 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java +++ b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java @@ -35,8 +35,6 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorJob; @@ -56,12 +54,14 @@ import org.opensearch.index.get.GetResult; import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; -public class DeleteAnomalyDetectorTests extends AbstractADTest { +public class DeleteAnomalyDetectorTests extends AbstractTimeSeriesTest { private DeleteAnomalyDetectorTransportAction action; private TransportService transportService; private ActionFilters actionFilters; @@ 
-298,7 +298,7 @@ private void setupMocks( Instant.now(), 60L, TestHelpers.randomUser(), - jobParameter.getResultIndex() + jobParameter.getCustomResultIndex() ).toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS) ), Collections.emptyMap(), diff --git a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportActionTests.java index 3c892d396..ac81ecf25 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportActionTests.java @@ -18,9 +18,9 @@ import org.junit.Before; import org.opensearch.action.delete.DeleteResponse; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.Feature; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyResultsTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyResultsTransportActionTests.java index dad52b44c..5653a577c 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyResultsTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyResultsTransportActionTests.java @@ -11,8 +11,8 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.matchAllRequest; import static org.opensearch.ad.constant.ADCommonName.ANOMALY_RESULT_INDEX_ALIAS; +import static org.opensearch.timeseries.TestHelpers.matchAllRequest; import java.io.IOException; import java.util.concurrent.TimeUnit; @@ -20,11 +20,11 @@ import org.junit.Ignore; import org.opensearch.action.search.SearchResponse; import 
org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryRequest; +import org.opensearch.timeseries.TestHelpers; public class DeleteAnomalyResultsTransportActionTests extends HistoricalAnalysisIntegTestCase { diff --git a/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java index e20e8e742..9246a63ce 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java @@ -27,7 +27,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.support.ActionFilters; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -47,13 +46,14 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.transport.TransportService; import test.org.opensearch.ad.util.JsonDeserializer; import com.google.gson.JsonElement; -public class DeleteModelTransportActionTests extends AbstractADTest { +public class DeleteModelTransportActionTests extends AbstractTimeSeriesTest { private DeleteModelTransportAction action; private String localNodeID; diff --git a/src/test/java/org/opensearch/ad/transport/DeleteTests.java b/src/test/java/org/opensearch/ad/transport/DeleteTests.java index 4b9994d2b..3721f5dec 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteTests.java +++ 
b/src/test/java/org/opensearch/ad/transport/DeleteTests.java @@ -41,7 +41,6 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.common.exception.JsonPathNotFoundException; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; @@ -61,12 +60,13 @@ import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.transport.TransportService; import test.org.opensearch.ad.util.ClusterCreation; import test.org.opensearch.ad.util.JsonDeserializer; -public class DeleteTests extends AbstractADTest { +public class DeleteTests extends AbstractTimeSeriesTest { private DeleteModelResponse response; private List failures; private List deleteModelResponse; @@ -202,7 +202,7 @@ public void testJsonRequestDeleteModel() throws IOException, JsonPathNotFoundExc public void testNewResponse() throws IOException { StreamInput input = mock(StreamInput.class); - when(input.readByte()).thenReturn((byte) 0x01); + when(input.readBoolean()).thenReturn(true); AcknowledgedResponse response = new AcknowledgedResponse(input); assertTrue(response.isAcknowledged()); diff --git a/src/test/java/org/opensearch/ad/transport/EntityProfileTests.java b/src/test/java/org/opensearch/ad/transport/EntityProfileTests.java index 575d80cf2..2a8a33130 100644 --- a/src/test/java/org/opensearch/ad/transport/EntityProfileTests.java +++ b/src/test/java/org/opensearch/ad/transport/EntityProfileTests.java @@ -31,8 +31,6 @@ import org.opensearch.Version; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; 
import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; import org.opensearch.ad.cluster.HashRing; @@ -50,6 +48,8 @@ import org.opensearch.common.transport.TransportAddress; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.ConnectTransportException; @@ -65,7 +65,7 @@ import test.org.opensearch.ad.util.FakeNode; import test.org.opensearch.ad.util.JsonDeserializer; -public class EntityProfileTests extends AbstractADTest { +public class EntityProfileTests extends AbstractTimeSeriesTest { private String detectorId = "yecrdnUBqurvo9uKU_d8"; private String entityValue = "app_0"; private String nodeId = "abc"; diff --git a/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java index 6765dc2e7..2e0aac6b4 100644 --- a/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java @@ -49,10 +49,8 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.AnomalyDetectorJobRunnerTests; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -83,6 +81,8 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.ToXContent; import 
org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.LimitExceededException; import org.opensearch.timeseries.constant.CommonMessages; @@ -97,7 +97,7 @@ import com.google.gson.JsonArray; import com.google.gson.JsonElement; -public class EntityResultTransportActionTests extends AbstractADTest { +public class EntityResultTransportActionTests extends AbstractTimeSeriesTest { EntityResultTransportAction entityResult; ActionFilters actionFilters; TransportService transportService; @@ -216,13 +216,13 @@ public void setUp() throws Exception { cacheMissData = new double[] { 0.1 }; cacheHitEntity = "0.0.0.2"; cacheHitData = new double[] { 0.2 }; - cacheMissEntityObj = Entity.createSingleAttributeEntity(detector.getCategoryField().get(0), cacheMissEntity); + cacheMissEntityObj = Entity.createSingleAttributeEntity(detector.getCategoryFields().get(0), cacheMissEntity); entities.put(cacheMissEntityObj, cacheMissData); - cacheHitEntityObj = Entity.createSingleAttributeEntity(detector.getCategoryField().get(0), cacheHitEntity); + cacheHitEntityObj = Entity.createSingleAttributeEntity(detector.getCategoryFields().get(0), cacheHitEntity); entities.put(cacheHitEntityObj, cacheHitData); tooLongEntity = randomAlphaOfLength(AnomalyDetectorSettings.MAX_ENTITY_LENGTH + 1); tooLongData = new double[] { 0.3 }; - entities.put(Entity.createSingleAttributeEntity(detector.getCategoryField().get(0), tooLongEntity), tooLongData); + entities.put(Entity.createSingleAttributeEntity(detector.getCategoryFields().get(0), tooLongEntity), tooLongData); ModelState state = MLUtil.randomModelState(new RandomModelStateConfig.Builder().fullModel(true).build()); when(entityCache.get(eq(cacheMissEntityObj.getModelId(detectorId).get()), any())).thenReturn(null); diff --git 
a/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java b/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java index a543708e3..94cbdee06 100644 --- a/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java +++ b/src/test/java/org/opensearch/ad/transport/ForwardADTaskRequestTests.java @@ -11,12 +11,12 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.randomIntervalTimeConfiguration; -import static org.opensearch.ad.TestHelpers.randomQuery; -import static org.opensearch.ad.TestHelpers.randomUser; import static org.opensearch.ad.model.ADTaskAction.CLEAN_CACHE; import static org.opensearch.ad.model.ADTaskAction.CLEAN_STALE_RUNNING_ENTITIES; import static org.opensearch.ad.model.ADTaskAction.START; +import static org.opensearch.timeseries.TestHelpers.randomIntervalTimeConfiguration; +import static org.opensearch.timeseries.TestHelpers.randomQuery; +import static org.opensearch.timeseries.TestHelpers.randomUser; import java.io.IOException; import java.time.Instant; @@ -26,7 +26,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.mock.transport.MockADTaskAction_1_0; import org.opensearch.ad.mock.transport.MockForwardADTaskRequest_1_0; import org.opensearch.ad.model.ADTask; @@ -37,6 +36,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.VersionException; import org.opensearch.timeseries.settings.TimeSeriesSettings; @@ -77,7 +77,8 @@ public void testNullDetectorIdAndTaskAction() throws IOException { Instant.now(), null, randomUser(), - null + null, + TestHelpers.randomImputationOption() ); ForwardADTaskRequest request = new 
ForwardADTaskRequest(detector, null, null, null, null, Version.V_2_1_0); ActionRequestValidationException validate = request.validate(); diff --git a/src/test/java/org/opensearch/ad/transport/ForwardADTaskTests.java b/src/test/java/org/opensearch/ad/transport/ForwardADTaskTests.java index 3285b9592..29b706dcc 100644 --- a/src/test/java/org/opensearch/ad/transport/ForwardADTaskTests.java +++ b/src/test/java/org/opensearch/ad/transport/ForwardADTaskTests.java @@ -19,7 +19,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.model.ADTaskAction; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -28,6 +27,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java index ce9d774c1..f2da82c36 100644 --- a/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java @@ -33,13 +33,13 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.ad.ADUnitTestCase; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.task.ADTaskCacheManager; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.TestHelpers; import 
org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorResponseTests.java b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorResponseTests.java index 86a8958b8..fa9ff1b8c 100644 --- a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorResponseTests.java +++ b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorResponseTests.java @@ -17,7 +17,6 @@ import java.util.Collection; import org.opensearch.ad.AnomalyDetectorPlugin; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; @@ -25,6 +24,7 @@ import org.opensearch.rest.RestStatus; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java index d784ba4f9..36c852bed 100644 --- a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java +++ b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java @@ -40,7 +40,6 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.model.ADTask; @@ -57,12 +56,13 @@ import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.index.get.GetResult; +import 
org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; -public class GetAnomalyDetectorTests extends AbstractADTest { +public class GetAnomalyDetectorTests extends AbstractTimeSeriesTest { private GetAnomalyDetectorTransportAction action; private TransportService transportService; private DiscoveryNodeFilterer nodeFilter; diff --git a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java index 333bd03ab..7b7860f52 100644 --- a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java @@ -27,7 +27,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; @@ -52,6 +51,7 @@ import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorActionTests.java b/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorActionTests.java index 4d62e4a21..b85993272 100644 --- a/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorActionTests.java @@ -18,7 +18,6 @@ import org.junit.Before; 
import org.junit.Test; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.util.RestHandlerUtils; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -30,6 +29,7 @@ import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestStatus; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java index d5daf76eb..587265d4d 100644 --- a/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java @@ -38,7 +38,6 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.WriteRequest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.model.AnomalyDetector; @@ -62,6 +61,7 @@ import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.TransportService; @@ -123,7 +123,7 @@ public void setUp() throws Exception { ); task = mock(Task.class); AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); - GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX); + GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, 
detector.getId(), CommonName.CONFIG_INDEX); doAnswer(invocation -> { Object[] args = invocation.getArguments(); assertTrue( diff --git a/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java b/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java index 0ffc6f511..7b74ebcb5 100644 --- a/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java +++ b/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java @@ -69,9 +69,7 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.AbstractADTest; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -117,6 +115,8 @@ import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.InternalFailure; import org.opensearch.timeseries.common.exception.LimitExceededException; @@ -138,7 +138,7 @@ import com.google.common.collect.ImmutableList; -public class MultiEntityResultTests extends AbstractADTest { +public class MultiEntityResultTests extends AbstractTimeSeriesTest { private AnomalyResultTransportAction action; private AnomalyResultRequest request; private TransportInterceptor entityResultInterceptor; diff --git a/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorActionTests.java b/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorActionTests.java index d710369d6..a838090e1 100644 --- 
a/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorActionTests.java @@ -15,7 +15,6 @@ import org.junit.Assert; import org.junit.Test; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.common.io.stream.BytesStreamOutput; @@ -23,6 +22,7 @@ import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -47,7 +47,7 @@ public void testPreviewRequest() throws Exception { request.writeTo(out); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry()); PreviewAnomalyDetectorRequest newRequest = new PreviewAnomalyDetectorRequest(input); - Assert.assertEquals(request.getDetectorId(), newRequest.getDetectorId()); + Assert.assertEquals(request.getId(), newRequest.getId()); Assert.assertEquals(request.getStartTime(), newRequest.getStartTime()); Assert.assertEquals(request.getEndTime(), newRequest.getEndTime()); Assert.assertNotNull(newRequest.getDetector()); diff --git a/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportActionTests.java index 97e6b1d68..1f20dc119 100644 --- a/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportActionTests.java @@ -47,7 +47,6 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.WriteRequest; import org.opensearch.ad.AnomalyDetectorRunner; -import 
org.opensearch.ad.TestHelpers; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.feature.Features; @@ -74,6 +73,7 @@ import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.transport.TransportService; @@ -149,12 +149,7 @@ public void setUp() throws Exception { public void testPreviewTransportAction() throws IOException, InterruptedException { final CountDownLatch inProgressLatch = new CountDownLatch(1); AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); - PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest( - detector, - detector.getDetectorId(), - Instant.now(), - Instant.now() - ); + PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest(detector, detector.getId(), Instant.now(), Instant.now()); ActionListener previewResponse = new ActionListener() { @Override public void onResponse(PreviewAnomalyDetectorResponse response) { @@ -196,12 +191,7 @@ public void testPreviewTransportActionWithNoFeature() throws IOException, Interr // Detector with no feature, Preview should fail final CountDownLatch inProgressLatch = new CountDownLatch(1); AnomalyDetector detector = TestHelpers.randomAnomalyDetector(Collections.emptyList()); - PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest( - detector, - detector.getDetectorId(), - Instant.now(), - Instant.now() - ); + PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest(detector, detector.getId(), Instant.now(), Instant.now()); ActionListener previewResponse = new ActionListener() { @Override public void onResponse(PreviewAnomalyDetectorResponse 
response) { @@ -306,14 +296,9 @@ public void testPreviewTransportActionNoContext() throws IOException, Interrupte circuitBreaker ); AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); - PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest( - detector, - detector.getDetectorId(), - Instant.now(), - Instant.now() - ); + PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest(detector, detector.getId(), Instant.now(), Instant.now()); - GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX); + GetResponse getDetectorResponse = TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX); doAnswer(invocation -> { Object[] args = invocation.getArguments(); assertTrue( @@ -405,12 +390,7 @@ public void onFailure(Exception e) { public void testCircuitBreakerOpen() throws IOException, InterruptedException { // preview has no detector id AnomalyDetector detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields(null, Arrays.asList("a")); - PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest( - detector, - detector.getDetectorId(), - Instant.now(), - Instant.now() - ); + PreviewAnomalyDetectorRequest request = new PreviewAnomalyDetectorRequest(detector, detector.getId(), Instant.now(), Instant.now()); when(circuitBreaker.isOpen()).thenReturn(true); diff --git a/src/test/java/org/opensearch/ad/transport/ProfileTests.java b/src/test/java/org/opensearch/ad/transport/ProfileTests.java index 15c4133fe..affa5d8a1 100644 --- a/src/test/java/org/opensearch/ad/transport/ProfileTests.java +++ b/src/test/java/org/opensearch/ad/transport/ProfileTests.java @@ -117,7 +117,7 @@ public void testProfileNodeRequest() throws IOException { profilesToRetrieve.add(DetectorProfileName.COORDINATING_NODE); ProfileRequest ProfileRequest = new ProfileRequest(detectorId, 
profilesToRetrieve, false); ProfileNodeRequest ProfileNodeRequest = new ProfileNodeRequest(ProfileRequest); - assertEquals("ProfileNodeRequest has the wrong detector id", ProfileNodeRequest.getDetectorId(), detectorId); + assertEquals("ProfileNodeRequest has the wrong detector id", ProfileNodeRequest.getId(), detectorId); assertEquals("ProfileNodeRequest has the wrong ProfileRequest", ProfileNodeRequest.getProfilesToBeRetrieved(), profilesToRetrieve); // Test serialization @@ -125,7 +125,7 @@ public void testProfileNodeRequest() throws IOException { ProfileNodeRequest.writeTo(output); StreamInput streamInput = output.bytes().streamInput(); ProfileNodeRequest nodeRequest = new ProfileNodeRequest(streamInput); - assertEquals("serialization has the wrong detector id", nodeRequest.getDetectorId(), detectorId); + assertEquals("serialization has the wrong detector id", nodeRequest.getId(), detectorId); assertEquals("serialization has the wrong ProfileRequest", nodeRequest.getProfilesToBeRetrieved(), profilesToRetrieve); } @@ -183,7 +183,7 @@ public void testProfileRequest() throws IOException { readRequest.getProfilesToBeRetrieved(), profileRequest.getProfilesToBeRetrieved() ); - assertEquals("Serialization has the wrong detector id", readRequest.getDetectorId(), profileRequest.getDetectorId()); + assertEquals("Serialization has the wrong detector id", readRequest.getId(), profileRequest.getId()); } @Test diff --git a/src/test/java/org/opensearch/ad/transport/ProfileTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ProfileTransportActionTests.java index f0788dc5e..70db60b5c 100644 --- a/src/test/java/org/opensearch/ad/transport/ProfileTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ProfileTransportActionTests.java @@ -145,7 +145,7 @@ public void testNewNodeRequest() { ProfileNodeRequest profileNodeRequest1 = new ProfileNodeRequest(profileRequest); ProfileNodeRequest profileNodeRequest2 = action.newNodeRequest(profileRequest); 
- assertEquals(profileNodeRequest1.getDetectorId(), profileNodeRequest2.getDetectorId()); + assertEquals(profileNodeRequest1.getId(), profileNodeRequest2.getId()); assertEquals(profileNodeRequest2.getProfilesToBeRetrieved(), profileNodeRequest2.getProfilesToBeRetrieved()); } diff --git a/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java b/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java index 94b555e18..cf6e45e14 100644 --- a/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java +++ b/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java @@ -29,8 +29,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.common.exception.JsonPathNotFoundException; import org.opensearch.ad.constant.ADCommonName; @@ -43,6 +41,8 @@ import org.opensearch.common.transport.TransportAddress; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.transport.ConnectTransportException; import org.opensearch.transport.Transport; @@ -60,7 +60,7 @@ import com.google.gson.Gson; import com.google.gson.GsonBuilder; -public class RCFPollingTests extends AbstractADTest { +public class RCFPollingTests extends AbstractTimeSeriesTest { Gson gson = new GsonBuilder().create(); private String detectorId = "jqIG6XIBEyaF3zCMZfcB"; private String model0Id; diff --git a/src/test/java/org/opensearch/ad/transport/SearchADTasksActionTests.java b/src/test/java/org/opensearch/ad/transport/SearchADTasksActionTests.java index 7be934870..0d7ea4d9d 100644 --- 
a/src/test/java/org/opensearch/ad/transport/SearchADTasksActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchADTasksActionTests.java @@ -11,15 +11,15 @@ package org.opensearch.ad.transport; -import static org.opensearch.ad.TestHelpers.matchAllRequest; +import static org.opensearch.timeseries.TestHelpers.matchAllRequest; import java.io.IOException; import org.junit.Test; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; +import org.opensearch.timeseries.TestHelpers; public class SearchADTasksActionTests extends HistoricalAnalysisIntegTestCase { diff --git a/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorActionTests.java b/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorActionTests.java index f1d93c8ed..96099af31 100644 --- a/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorActionTests.java @@ -20,13 +20,13 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorType; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorInfoActionTests.java b/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorInfoActionTests.java index a710c0aeb..4e56ceb63 
100644 --- a/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorInfoActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchAnomalyDetectorInfoActionTests.java @@ -16,7 +16,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.ad.TestHelpers.createEmptySearchResponse; +import static org.opensearch.timeseries.TestHelpers.createEmptySearchResponse; import java.io.IOException; import java.util.Arrays; @@ -30,7 +30,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; @@ -43,6 +42,7 @@ import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; public class SearchAnomalyDetectorInfoActionTests extends OpenSearchIntegTestCase { diff --git a/src/test/java/org/opensearch/ad/transport/SearchAnomalyResultActionTests.java b/src/test/java/org/opensearch/ad/transport/SearchAnomalyResultActionTests.java index d9b7c2ea8..d5babf203 100644 --- a/src/test/java/org/opensearch/ad/transport/SearchAnomalyResultActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchAnomalyResultActionTests.java @@ -16,10 +16,10 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.ad.TestHelpers.createClusterState; -import static org.opensearch.ad.TestHelpers.createSearchResponse; -import static org.opensearch.ad.TestHelpers.matchAllRequest; import static 
org.opensearch.ad.indices.AnomalyDetectionIndices.ALL_AD_RESULTS_INDEX_PATTERN; +import static org.opensearch.timeseries.TestHelpers.createClusterState; +import static org.opensearch.timeseries.TestHelpers.createSearchResponse; +import static org.opensearch.timeseries.TestHelpers.matchAllRequest; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -40,7 +40,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.transport.handler.ADSearchHandler; @@ -59,6 +58,7 @@ import org.opensearch.search.aggregations.bucket.terms.StringTerms; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; diff --git a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultActionTests.java b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultActionTests.java index 7669b8c5b..1e214f209 100644 --- a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultActionTests.java @@ -15,10 +15,10 @@ import org.junit.Before; import org.opensearch.ad.HistoricalAnalysisIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequestTests.java 
b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequestTests.java index 4a9e5a84b..0668d71ca 100644 --- a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequestTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultRequestTests.java @@ -14,11 +14,11 @@ import org.junit.Assert; import org.opensearch.action.ActionRequestValidationException; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; public class SearchTopAnomalyResultRequestTests extends OpenSearchTestCase { @@ -38,7 +38,7 @@ public void testSerialization() throws IOException { originalRequest.writeTo(output); StreamInput input = output.bytes().streamInput(); SearchTopAnomalyResultRequest parsedRequest = new SearchTopAnomalyResultRequest(input); - assertEquals(originalRequest.getDetectorId(), parsedRequest.getDetectorId()); + assertEquals(originalRequest.getId(), parsedRequest.getId()); assertEquals(originalRequest.getTaskId(), parsedRequest.getTaskId()); assertEquals(originalRequest.getHistorical(), parsedRequest.getHistorical()); assertEquals(originalRequest.getSize(), parsedRequest.getSize()); @@ -78,7 +78,7 @@ public void testParse() throws IOException { assertEquals(order, parsedRequest.getOrder()); assertEquals(startTime.toEpochMilli(), parsedRequest.getStartTime().toEpochMilli()); assertEquals(endTime.toEpochMilli(), parsedRequest.getEndTime().toEpochMilli()); - assertEquals(detectorId, parsedRequest.getDetectorId()); + assertEquals(detectorId, parsedRequest.getId()); assertEquals(historical, parsedRequest.getHistorical()); } diff --git a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultResponseTests.java 
b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultResponseTests.java index 841368c36..4f9181081 100644 --- a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultResponseTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultResponseTests.java @@ -9,10 +9,10 @@ import java.util.ArrayList; import java.util.Arrays; -import org.opensearch.ad.TestHelpers; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; public class SearchTopAnomalyResultResponseTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportActionTests.java index 90ad49123..969dc4523 100644 --- a/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/SearchTopAnomalyResultTransportActionTests.java @@ -28,7 +28,6 @@ import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.action.support.ActionFilters; import org.opensearch.ad.ADIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyResultBucket; import org.opensearch.ad.transport.handler.ADSearchHandler; @@ -39,6 +38,7 @@ import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation; import org.opensearch.search.aggregations.metrics.InternalMax; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java 
b/src/test/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java index 157dfbfea..7c877c086 100644 --- a/src/test/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportActionTests.java @@ -16,8 +16,8 @@ import org.junit.Before; import org.opensearch.ad.ADIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.stats.InternalStatNames; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.stats.StatNames; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorRequestTests.java b/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorRequestTests.java index 7846a79cc..135a334b7 100644 --- a/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorRequestTests.java +++ b/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorRequestTests.java @@ -15,13 +15,13 @@ import java.time.Instant; import org.junit.Test; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorResponseTests.java b/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorResponseTests.java index d151ed53a..533bfd52d 100644 --- a/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorResponseTests.java +++ b/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorResponseTests.java @@ -16,14 +16,14 @@ import 
java.util.Map; import org.junit.Test; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.model.DetectorValidationIssue; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; -public class ValidateAnomalyDetectorResponseTests extends AbstractADTest { +public class ValidateAnomalyDetectorResponseTests extends AbstractTimeSeriesTest { @Test public void testResponseSerialization() throws IOException { diff --git a/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportActionTests.java index bd5ae2065..fa67c3bed 100644 --- a/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportActionTests.java @@ -19,13 +19,13 @@ import org.junit.Test; import org.opensearch.ad.ADIntegTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.common.unit.TimeValue; import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.ValidationAspect; @@ -368,7 +368,8 @@ public void testValidateAnomalyDetectorWithInvalidDetectorName() throws IOExcept Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ); 
ingestTestDataValidate(anomalyDetector.getIndices().get(0), Instant.now().minus(1, ChronoUnit.DAYS), 1, "error"); ValidateAnomalyDetectorRequest request = new ValidateAnomalyDetectorRequest( @@ -404,7 +405,8 @@ public void testValidateAnomalyDetectorWithDetectorNameTooLong() throws IOExcept Instant.now(), null, TestHelpers.randomUser(), - null + null, + TestHelpers.randomImputationOption() ); ingestTestDataValidate(anomalyDetector.getIndices().get(0), Instant.now().minus(1, ChronoUnit.DAYS), 1, "error"); ValidateAnomalyDetectorRequest request = new ValidateAnomalyDetectorRequest( diff --git a/src/test/java/org/opensearch/ad/transport/handler/ADSearchHandlerTests.java b/src/test/java/org/opensearch/ad/transport/handler/ADSearchHandlerTests.java index b27d1e8fe..793bdc7b9 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/ADSearchHandlerTests.java +++ b/src/test/java/org/opensearch/ad/transport/handler/ADSearchHandlerTests.java @@ -17,8 +17,8 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.ad.TestHelpers.matchAllRequest; import static org.opensearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; +import static org.opensearch.timeseries.TestHelpers.matchAllRequest; import org.junit.Before; import org.opensearch.action.ActionListener; diff --git a/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java b/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java index 7f8f9147d..3746c68e0 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java +++ b/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java @@ -14,7 +14,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.when; -import static org.opensearch.ad.TestHelpers.createIndexBlockedState; +import 
static org.opensearch.timeseries.TestHelpers.createIndexBlockedState; import java.io.IOException; import java.util.Arrays; @@ -27,8 +27,6 @@ import org.opensearch.ResourceAlreadyExistsException; import org.opensearch.action.ActionListener; import org.opensearch.action.admin.indices.create.CreateIndexResponse; -import org.opensearch.ad.AbstractADTest; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.transport.AnomalyResultTests; @@ -43,8 +41,10 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; -public abstract class AbstractIndexHandlerTest extends AbstractADTest { +public abstract class AbstractIndexHandlerTest extends AbstractTimeSeriesTest { enum IndexCreation { RUNTIME_EXCEPTION, RESOURCE_EXISTS_EXCEPTION, diff --git a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java index d27fefeb9..f799e14cb 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java +++ b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java @@ -34,7 +34,6 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.index.IndexResponse; import org.opensearch.ad.ADUnitTestCase; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.indices.AnomalyDetectionIndices; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.util.ClientUtil; @@ -47,6 +46,7 @@ import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.index.shard.ShardId; import org.opensearch.threadpool.ThreadPool; +import 
org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; @@ -98,7 +98,7 @@ public void testNullAnomalyResults() { public void testAnomalyResultBulkIndexHandler_IndexNotExist() { when(anomalyDetectionIndices.doesIndexExist("testIndex")).thenReturn(false); AnomalyResult anomalyResult = mock(AnomalyResult.class); - when(anomalyResult.getDetectorId()).thenReturn("testId"); + when(anomalyResult.getId()).thenReturn("testId"); bulkIndexHandler.bulkIndexAnomalyResult("testIndex", ImmutableList.of(anomalyResult), listener); verify(listener, times(1)).onFailure(exceptionCaptor.capture()); @@ -109,7 +109,7 @@ public void testAnomalyResultBulkIndexHandler_InValidResultIndexMapping() { when(anomalyDetectionIndices.doesIndexExist("testIndex")).thenReturn(true); when(anomalyDetectionIndices.isValidResultIndexMapping("testIndex")).thenReturn(false); AnomalyResult anomalyResult = mock(AnomalyResult.class); - when(anomalyResult.getDetectorId()).thenReturn("testId"); + when(anomalyResult.getId()).thenReturn("testId"); bulkIndexHandler.bulkIndexAnomalyResult("testIndex", ImmutableList.of(anomalyResult), listener); verify(listener, times(1)).onFailure(exceptionCaptor.capture()); @@ -120,7 +120,7 @@ public void testAnomalyResultBulkIndexHandler_FailBulkIndexAnomaly() throws IOEx when(anomalyDetectionIndices.doesIndexExist("testIndex")).thenReturn(true); when(anomalyDetectionIndices.isValidResultIndexMapping("testIndex")).thenReturn(true); AnomalyResult anomalyResult = mock(AnomalyResult.class); - when(anomalyResult.getDetectorId()).thenReturn("testId"); + when(anomalyResult.getId()).thenReturn("testId"); when(anomalyResult.toXContent(any(), any())).thenThrow(new RuntimeException()); bulkIndexHandler.bulkIndexAnomalyResult("testIndex", ImmutableList.of(anomalyResult), listener); diff --git a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java index 
31c36cdd3..89367a72b 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java +++ b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java @@ -35,12 +35,12 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexResponse; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; public class AnomalyResultHandlerTests extends AbstractIndexHandlerTest { diff --git a/src/test/java/org/opensearch/ad/transport/handler/MultiEntityResultHandlerTests.java b/src/test/java/org/opensearch/ad/transport/handler/MultiEntityResultHandlerTests.java index 169c50482..4c8446577 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/MultiEntityResultHandlerTests.java +++ b/src/test/java/org/opensearch/ad/transport/handler/MultiEntityResultHandlerTests.java @@ -24,12 +24,12 @@ import org.junit.Test; import org.mockito.ArgumentMatchers; import org.opensearch.action.ActionListener; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.ratelimit.RequestPriority; import org.opensearch.ad.ratelimit.ResultWriteRequest; import org.opensearch.ad.transport.ADResultBulkAction; import org.opensearch.ad.transport.ADResultBulkRequest; import org.opensearch.ad.transport.ADResultBulkResponse; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; public class MultiEntityResultHandlerTests extends AbstractIndexHandlerTest { diff --git a/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java 
b/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java index b57459c7a..bea6abf95 100644 --- a/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java +++ b/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java @@ -18,12 +18,12 @@ import org.junit.Before; import org.junit.Test; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.TestHelpers; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.TestHelpers; public class IndexUtilsTests extends OpenSearchIntegTestCase { diff --git a/src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java b/src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java index d7b385a8d..b905ce623 100644 --- a/src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java +++ b/src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java @@ -13,7 +13,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; -import static org.opensearch.ad.TestHelpers.randomHCADAnomalyDetectResult; +import static org.opensearch.timeseries.TestHelpers.randomHCADAnomalyDetectResult; import java.util.ArrayList; import java.util.concurrent.CountDownLatch; diff --git a/src/test/java/org/opensearch/ad/util/ParseUtilsTests.java b/src/test/java/org/opensearch/ad/util/ParseUtilsTests.java index 3fa987aa6..188d67e59 100644 --- a/src/test/java/org/opensearch/ad/util/ParseUtilsTests.java +++ b/src/test/java/org/opensearch/ad/util/ParseUtilsTests.java @@ -19,7 +19,6 @@ import java.time.temporal.ChronoUnit; import java.util.List; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.common.ParsingException; import 
org.opensearch.common.xcontent.XContentFactory; @@ -30,6 +29,7 @@ import org.opensearch.search.aggregations.AggregatorFactories; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.util.ParseUtils; @@ -125,14 +125,6 @@ public void testGenerateInternalFeatureQuery() throws IOException { } } - public void testGenerateInternalFeatureQueryTemplate() throws IOException { - AnomalyDetector detector = TestHelpers.randomAnomalyDetector(null, Instant.now()); - String builder = ParseUtils.generateInternalFeatureQueryTemplate(detector, TestHelpers.xContentRegistry()); - for (Feature feature : detector.getFeatureAttributes()) { - assertTrue(builder.contains(feature.getId())); - } - } - public void testAddUserRoleFilterWithNullUser() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); addUserBackendRolesFilter(null, searchSourceBuilder); diff --git a/src/test/java/org/opensearch/ad/util/RestHandlerUtilsTests.java b/src/test/java/org/opensearch/ad/util/RestHandlerUtilsTests.java index b4412ddda..b7bb84dd2 100644 --- a/src/test/java/org/opensearch/ad/util/RestHandlerUtilsTests.java +++ b/src/test/java/org/opensearch/ad/util/RestHandlerUtilsTests.java @@ -11,13 +11,12 @@ package org.opensearch.ad.util; -import static org.opensearch.ad.TestHelpers.builder; -import static org.opensearch.ad.TestHelpers.randomFeature; import static org.opensearch.ad.util.RestHandlerUtils.OPENSEARCH_DASHBOARDS_USER_AGENT; +import static org.opensearch.timeseries.TestHelpers.builder; +import static org.opensearch.timeseries.TestHelpers.randomFeature; import java.io.IOException; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.common.bytes.BytesReference; import 
org.opensearch.core.xcontent.NamedXContentRegistry; @@ -30,6 +29,7 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.FakeRestChannel; import org.opensearch.test.rest.FakeRestRequest; +import org.opensearch.timeseries.TestHelpers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/util/ThrottlerTests.java b/src/test/java/org/opensearch/ad/util/ThrottlerTests.java index ed2a6f710..61bb19ec8 100644 --- a/src/test/java/org/opensearch/ad/util/ThrottlerTests.java +++ b/src/test/java/org/opensearch/ad/util/ThrottlerTests.java @@ -34,11 +34,11 @@ public void setup() { @Test public void testGetFilteredQuery() { AnomalyDetector detector = mock(AnomalyDetector.class); - when(detector.getDetectorId()).thenReturn("test detector Id"); + when(detector.getId()).thenReturn("test detector Id"); SearchRequest dummySearchRequest = new SearchRequest(); - throttler.insertFilteredQuery(detector.getDetectorId(), dummySearchRequest); + throttler.insertFilteredQuery(detector.getId(), dummySearchRequest); // case 1: key exists - assertTrue(throttler.getFilteredQuery(detector.getDetectorId()).isPresent()); + assertTrue(throttler.getFilteredQuery(detector.getId()).isPresent()); // case 2: key doesn't exist assertFalse(throttler.getFilteredQuery("different test detector Id").isPresent()); } @@ -46,22 +46,22 @@ public void testGetFilteredQuery() { @Test public void testInsertFilteredQuery() { AnomalyDetector detector = mock(AnomalyDetector.class); - when(detector.getDetectorId()).thenReturn("test detector Id"); + when(detector.getId()).thenReturn("test detector Id"); SearchRequest dummySearchRequest = new SearchRequest(); // first time: key doesn't exist - assertTrue(throttler.insertFilteredQuery(detector.getDetectorId(), dummySearchRequest)); + assertTrue(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); // second time: key exists - 
assertFalse(throttler.insertFilteredQuery(detector.getDetectorId(), dummySearchRequest)); + assertFalse(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); } @Test public void testClearFilteredQuery() { AnomalyDetector detector = mock(AnomalyDetector.class); - when(detector.getDetectorId()).thenReturn("test detector Id"); + when(detector.getId()).thenReturn("test detector Id"); SearchRequest dummySearchRequest = new SearchRequest(); - assertTrue(throttler.insertFilteredQuery(detector.getDetectorId(), dummySearchRequest)); - throttler.clearFilteredQuery(detector.getDetectorId()); - assertTrue(throttler.insertFilteredQuery(detector.getDetectorId(), dummySearchRequest)); + assertTrue(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); + throttler.clearFilteredQuery(detector.getId()); + assertTrue(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); } } diff --git a/src/test/java/org/opensearch/forecast/model/ForecastSerializationTests.java b/src/test/java/org/opensearch/forecast/model/ForecastSerializationTests.java new file mode 100644 index 000000000..e7adbfc63 --- /dev/null +++ b/src/test/java/org/opensearch/forecast/model/ForecastSerializationTests.java @@ -0,0 +1,85 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.forecast.model; + +import java.io.IOException; +import java.util.Collection; + +import org.opensearch.ad.AnomalyDetectorPlugin; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.opensearch.common.io.stream.NamedWriteableRegistry; +import org.opensearch.plugins.Plugin; +import org.opensearch.test.InternalSettingsPlugin; +import org.opensearch.test.OpenSearchSingleNodeTestCase; +import org.opensearch.timeseries.TestHelpers; + +public class ForecastSerializationTests extends OpenSearchSingleNodeTestCase { + @Override + protected Collection> 
getPlugins() { + return pluginList(InternalSettingsPlugin.class, AnomalyDetectorPlugin.class); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + return getInstanceFromNode(NamedWriteableRegistry.class); + } + + public void testStreamConstructor() throws IOException { + Forecaster forecaster = TestHelpers.randomForecaster(); + + BytesStreamOutput output = new BytesStreamOutput(); + + forecaster.writeTo(output); + NamedWriteableAwareStreamInput streamInput = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + Forecaster parsedForecaster = new Forecaster(streamInput); + assertTrue(parsedForecaster.equals(forecaster)); + } + + public void testStreamConstructorNullUser() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setUser(null).build(); + + BytesStreamOutput output = new BytesStreamOutput(); + + forecaster.writeTo(output); + NamedWriteableAwareStreamInput streamInput = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + Forecaster parsedForecaster = new Forecaster(streamInput); + assertTrue(parsedForecaster.equals(forecaster)); + } + + public void testStreamConstructorNullUiMeta() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setUiMetadata(null).build(); + + BytesStreamOutput output = new BytesStreamOutput(); + + forecaster.writeTo(output); + NamedWriteableAwareStreamInput streamInput = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + Forecaster parsedForecaster = new Forecaster(streamInput); + assertTrue(parsedForecaster.equals(forecaster)); + } + + public void testStreamConstructorNullCustomResult() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setCustomResultIndex(null).build(); + + BytesStreamOutput output = new BytesStreamOutput(); + + forecaster.writeTo(output); + NamedWriteableAwareStreamInput 
streamInput = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + Forecaster parsedForecaster = new Forecaster(streamInput); + assertTrue(parsedForecaster.equals(forecaster)); + } + + public void testStreamConstructorNullImputationOption() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setNullImputationOption().build(); + + BytesStreamOutput output = new BytesStreamOutput(); + + forecaster.writeTo(output); + NamedWriteableAwareStreamInput streamInput = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); + Forecaster parsedForecaster = new Forecaster(streamInput); + assertTrue(parsedForecaster.equals(forecaster)); + } +} diff --git a/src/test/java/org/opensearch/forecast/model/ForecasterTests.java b/src/test/java/org/opensearch/forecast/model/ForecasterTests.java new file mode 100644 index 000000000..0b64912bf --- /dev/null +++ b/src/test/java/org/opensearch/forecast/model/ForecasterTests.java @@ -0,0 +1,396 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.forecast.model; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.Matchers.is; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.hamcrest.MatcherAssert; +import org.opensearch.commons.authuser.User; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.forecast.constant.ForecastCommonMessages; +import org.opensearch.forecast.constant.ForecastCommonName; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.TestHelpers; +import 
org.opensearch.timeseries.common.exception.ValidationException; +import org.opensearch.timeseries.constant.CommonMessages; +import org.opensearch.timeseries.dataprocessor.ImputationOption; +import org.opensearch.timeseries.model.Feature; +import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.TimeConfiguration; +import org.opensearch.timeseries.model.ValidationAspect; +import org.opensearch.timeseries.model.ValidationIssueType; + +public class ForecasterTests extends AbstractTimeSeriesTest { + TimeConfiguration forecastInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); + TimeConfiguration windowDelay = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES); + String forecasterId = "testId"; + Long version = 1L; + String name = "testName"; + String description = "testDescription"; + String timeField = "testTimeField"; + List indices = Collections.singletonList("testIndex"); + List features = Collections.emptyList(); // Assuming no features for simplicity + MatchAllQueryBuilder filterQuery = QueryBuilders.matchAllQuery(); + Integer shingleSize = 1; + Map uiMetadata = new HashMap<>(); + Integer schemaVersion = 1; + Instant lastUpdateTime = Instant.now(); + List categoryFields = Arrays.asList("field1", "field2"); + User user = new User("testUser", Collections.emptyList(), Collections.emptyList(), Collections.emptyList()); + String resultIndex = null; + Integer horizon = 1; + + public void testForecasterConstructor() { + ImputationOption imputationOption = TestHelpers.randomImputationOption(); + + Forecaster forecaster = new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + imputationOption + ); + + assertEquals(forecasterId, forecaster.getId()); + assertEquals(version, forecaster.getVersion()); 
+ assertEquals(name, forecaster.getName()); + assertEquals(description, forecaster.getDescription()); + assertEquals(timeField, forecaster.getTimeField()); + assertEquals(indices, forecaster.getIndices()); + assertEquals(features, forecaster.getFeatureAttributes()); + assertEquals(filterQuery, forecaster.getFilterQuery()); + assertEquals(forecastInterval, forecaster.getInterval()); + assertEquals(windowDelay, forecaster.getWindowDelay()); + assertEquals(shingleSize, forecaster.getShingleSize()); + assertEquals(uiMetadata, forecaster.getUiMetadata()); + assertEquals(schemaVersion, forecaster.getSchemaVersion()); + assertEquals(lastUpdateTime, forecaster.getLastUpdateTime()); + assertEquals(categoryFields, forecaster.getCategoryFields()); + assertEquals(user, forecaster.getUser()); + assertEquals(resultIndex, forecaster.getCustomResultIndex()); + assertEquals(horizon, forecaster.getHorizon()); + assertEquals(imputationOption, forecaster.getImputationOption()); + } + + public void testForecasterConstructorWithNullForecastInterval() { + TimeConfiguration forecastInterval = null; + + ValidationException ex = expectThrows(ValidationException.class, () -> { + new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + }); + + MatcherAssert.assertThat(ex.getMessage(), containsString(ForecastCommonMessages.NULL_FORECAST_INTERVAL)); + MatcherAssert.assertThat(ex.getType(), is(ValidationIssueType.FORECAST_INTERVAL)); + MatcherAssert.assertThat(ex.getAspect(), is(ValidationAspect.FORECASTER)); + } + + public void testNegativeInterval() { + var forecastInterval = new IntervalTimeConfiguration(0, ChronoUnit.MINUTES); // An interval less than or equal to zero + + ValidationException ex = expectThrows(ValidationException.class, () 
-> { + new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + }); + + MatcherAssert.assertThat(ex.getMessage(), containsString(ForecastCommonMessages.INVALID_FORECAST_INTERVAL)); + MatcherAssert.assertThat(ex.getType(), is(ValidationIssueType.FORECAST_INTERVAL)); + MatcherAssert.assertThat(ex.getAspect(), is(ValidationAspect.FORECASTER)); + } + + public void testMaxCategoryFieldsLimits() { + List categoryFields = Arrays.asList("field1", "field2", "field3"); + + ValidationException ex = expectThrows(ValidationException.class, () -> { + new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + }); + + MatcherAssert.assertThat(ex.getMessage(), containsString(CommonMessages.getTooManyCategoricalFieldErr(2))); + MatcherAssert.assertThat(ex.getType(), is(ValidationIssueType.CATEGORY)); + MatcherAssert.assertThat(ex.getAspect(), is(ValidationAspect.FORECASTER)); + } + + public void testBlankName() { + String name = ""; + + ValidationException ex = expectThrows(ValidationException.class, () -> { + new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + }); + + MatcherAssert.assertThat(ex.getMessage(), containsString(CommonMessages.EMPTY_NAME)); + MatcherAssert.assertThat(ex.getType(), is(ValidationIssueType.NAME)); 
+ MatcherAssert.assertThat(ex.getAspect(), is(ValidationAspect.FORECASTER)); + } + + public void testInvalidCustomResultIndex() { + String resultIndex = "test"; + + ValidationException ex = expectThrows(ValidationException.class, () -> { + new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + }); + + MatcherAssert.assertThat(ex.getMessage(), containsString(ForecastCommonMessages.INVALID_RESULT_INDEX_PREFIX)); + MatcherAssert.assertThat(ex.getType(), is(ValidationIssueType.RESULT_INDEX)); + MatcherAssert.assertThat(ex.getAspect(), is(ValidationAspect.FORECASTER)); + } + + public void testValidCustomResultIndex() { + String resultIndex = ForecastCommonName.CUSTOM_RESULT_INDEX_PREFIX + "test"; + + var forecaster = new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + + assertEquals(resultIndex, forecaster.getCustomResultIndex()); + } + + public void testInvalidHorizon() { + int horizon = 0; + + ValidationException ex = expectThrows(ValidationException.class, () -> { + new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + TestHelpers.randomImputationOption() + ); + }); + + MatcherAssert.assertThat(ex.getMessage(), containsString("Horizon size must be a positive integer no larger than")); + MatcherAssert.assertThat(ex.getType(), 
is(ValidationIssueType.SHINGLE_SIZE_FIELD)); + MatcherAssert.assertThat(ex.getAspect(), is(ValidationAspect.FORECASTER)); + } + + public void testParse() throws IOException { + Forecaster forecaster = TestHelpers.randomForecaster(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testParseEmptyMetaData() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setUiMetadata(null).build(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testParseNullLastUpdateTime() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setLastUpdateTime(null).build(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testParseNullCategoryFields() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setCategoryFields(null).build(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = 
Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testParseNullUser() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setUser(null).build(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testParseNullCustomResultIndex() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setCustomResultIndex(null).build(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testParseNullImpute() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setNullImputationOption().build(); + String forecasterString = TestHelpers + .xContentBuilderToString(forecaster.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + LOG.info(forecasterString); + Forecaster parsedForecaster = Forecaster.parse(TestHelpers.parser(forecasterString)); + assertEquals("Parsing forecaster doesn't work", forecaster, parsedForecaster); + } + + public void testGetImputer() throws IOException { + Forecaster forecaster = TestHelpers.randomForecaster(); + assertTrue(null != forecaster.getImputer()); + } + + public void testGetImputerNullImputer() throws IOException { + Forecaster forecaster = TestHelpers.ForecasterBuilder.newInstance().setNullImputationOption().build(); + 
assertTrue(null != forecaster.getImputer()); + } +} diff --git a/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java b/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java index c5687b7e0..7d9f9b1b2 100644 --- a/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java +++ b/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java @@ -32,7 +32,6 @@ import org.opensearch.ad.AbstractProfileRunnerTests; import org.opensearch.ad.AnomalyDetectorProfileRunner; import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.TestHelpers; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorJob; @@ -45,6 +44,7 @@ import org.opensearch.common.util.BigArrays; import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; @@ -96,7 +96,7 @@ private void setUpMultiEntityClientGet(DetectorStatus detectorStatus, JobStatus if (request.index().equals(CommonName.CONFIG_INDEX)) { switch (detectorStatus) { case EXIST: - listener.onResponse(TestHelpers.createGetResponse(detector, detector.getDetectorId(), CommonName.CONFIG_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(detector, detector.getId(), CommonName.CONFIG_INDEX)); break; default: assertTrue("should not reach here", false); @@ -107,7 +107,7 @@ private void setUpMultiEntityClientGet(DetectorStatus detectorStatus, JobStatus switch (jobStatus) { case ENABLED: job = TestHelpers.randomAnomalyDetectorJob(true); - listener.onResponse(TestHelpers.createGetResponse(job, detector.getDetectorId(), CommonName.JOB_INDEX)); + listener.onResponse(TestHelpers.createGetResponse(job, 
detector.getId(), CommonName.JOB_INDEX)); break; default: assertTrue("should not reach here", false); @@ -214,7 +214,7 @@ public void testFailGetEntityStats() throws IOException, InterruptedException { final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertTrue("Should not reach here ", false); inProgressLatch.countDown(); }, exception -> { @@ -236,7 +236,7 @@ public void testNoResultsNoError() throws IOException, InterruptedException { final AtomicInteger called = new AtomicInteger(0); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertTrue(response.getInitProgress() != null); called.getAndIncrement(); }, exception -> { @@ -258,7 +258,7 @@ public void testFailConfirmInitted() throws IOException, InterruptedException { final CountDownLatch inProgressLatch = new CountDownLatch(1); - runner.profile(detector.getDetectorId(), ActionListener.wrap(response -> { + runner.profile(detector.getId(), ActionListener.wrap(response -> { assertTrue("Should not reach here ", false); inProgressLatch.countDown(); }, exception -> { diff --git a/src/test/java/org/opensearch/ad/AbstractADTest.java b/src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java similarity index 98% rename from src/test/java/org/opensearch/ad/AbstractADTest.java rename to src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java index 5ac40defe..c3f5c161b 100644 --- a/src/test/java/org/opensearch/ad/AbstractADTest.java +++ b/src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java @@ -9,7 +9,7 @@ * GitHub history for details. 
*/ -package org.opensearch.ad; +package org.opensearch.timeseries; import static org.hamcrest.Matchers.containsString; import static org.mockito.ArgumentMatchers.any; @@ -43,6 +43,7 @@ import org.opensearch.Version; import org.opensearch.action.ActionResponse; import org.opensearch.action.support.PlainActionFuture; +import org.opensearch.ad.AnomalyDetectorPlugin; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; @@ -69,9 +70,9 @@ import test.org.opensearch.ad.util.FakeNode; -public class AbstractADTest extends OpenSearchTestCase { +public class AbstractTimeSeriesTest extends OpenSearchTestCase { - protected static final Logger LOG = (Logger) LogManager.getLogger(AbstractADTest.class); + protected static final Logger LOG = (Logger) LogManager.getLogger(AbstractTimeSeriesTest.class); // transport test node protected int nodesCount; @@ -213,7 +214,7 @@ private String convertToRegex(String formattedStr) { protected TestAppender testAppender; - Logger logger; + protected Logger logger; /** * Set up test with junit that a warning was logged with log4j diff --git a/src/test/java/org/opensearch/ad/TestHelpers.java b/src/test/java/org/opensearch/timeseries/TestHelpers.java similarity index 88% rename from src/test/java/org/opensearch/ad/TestHelpers.java rename to src/test/java/org/opensearch/timeseries/TestHelpers.java index 5fcdbccd6..8da392c21 100644 --- a/src/test/java/org/opensearch/ad/TestHelpers.java +++ b/src/test/java/org/opensearch/timeseries/TestHelpers.java @@ -9,7 +9,7 @@ * GitHub history for details. 
*/ -package org.opensearch.ad; +package org.opensearch.timeseries; import static org.apache.hc.core5.http.ContentType.APPLICATION_JSON; import static org.opensearch.cluster.node.DiscoveryNodeRole.BUILT_IN_ROLES; @@ -32,10 +32,12 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.concurrent.Callable; import java.util.function.Consumer; +import java.util.stream.DoubleStream; import java.util.stream.IntStream; import org.apache.hc.core5.http.ContentType; @@ -97,6 +99,7 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.CheckedConsumer; import org.opensearch.common.Priority; +import org.opensearch.common.Randomness; import org.opensearch.common.UUIDs; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.bytes.BytesReference; @@ -113,6 +116,7 @@ import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.forecast.model.Forecaster; import org.opensearch.index.get.GetResult; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; @@ -130,9 +134,12 @@ import org.opensearch.search.profile.SearchProfileShardResults; import org.opensearch.search.suggest.Suggest; import org.opensearch.test.ClusterServiceUtils; +import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.OpenSearchRestTestCase; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.dataprocessor.ImputationMethod; +import org.opensearch.timeseries.dataprocessor.ImputationOption; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.IntervalTimeConfiguration; @@ -311,7 +318,8 @@ public static 
AnomalyDetector randomAnomalyDetector( lastUpdateTime, categoryFields, user, - null + null, + TestHelpers.randomImputationOption() ); } @@ -355,7 +363,8 @@ public static AnomalyDetector randomDetector( Instant.now(), categoryFields, null, - resultIndex + resultIndex, + TestHelpers.randomImputationOption() ); } @@ -409,7 +418,8 @@ public static AnomalyDetector randomAnomalyDetectorUsingCategoryFields( Instant.now(), categoryFields, randomUser(), - resultIndex + resultIndex, + TestHelpers.randomImputationOption() ); } @@ -439,7 +449,8 @@ public static AnomalyDetector randomAnomalyDetector(String timefield, String ind Instant.now(), null, randomUser(), - null + null, + TestHelpers.randomImputationOption() ); } @@ -461,7 +472,8 @@ public static AnomalyDetector randomAnomalyDetectorWithEmptyFeature() throws IOE Instant.now().truncatedTo(ChronoUnit.SECONDS), null, randomUser(), - null + null, + TestHelpers.randomImputationOption() ); } @@ -488,7 +500,8 @@ public static AnomalyDetector randomAnomalyDetectorWithInterval(TimeConfiguratio Instant.now().truncatedTo(ChronoUnit.SECONDS), categoryField, randomUser(), - null + null, + TestHelpers.randomImputationOption() ); } @@ -516,6 +529,7 @@ public static class AnomalyDetectorBuilder { private List categoryFields = null; private User user = randomUser(); private String resultIndex = null; + private ImputationOption imputationOption = null; public static AnomalyDetectorBuilder newInstance() throws IOException { return new AnomalyDetectorBuilder(); @@ -610,6 +624,11 @@ public AnomalyDetectorBuilder setResultIndex(String resultIndex) { return this; } + public AnomalyDetectorBuilder setImputationOption(ImputationMethod method, Optional defaultFill, boolean integerSensitive) { + this.imputationOption = new ImputationOption(method, defaultFill, integerSensitive); + return this; + } + public AnomalyDetector build() { return new AnomalyDetector( detectorId, @@ -628,7 +647,8 @@ public AnomalyDetector build() { lastUpdateTime,
categoryFields, user, - resultIndex + resultIndex, + imputationOption ); } } @@ -653,7 +673,8 @@ public static AnomalyDetector randomAnomalyDetectorWithInterval(TimeConfiguratio Instant.now().truncatedTo(ChronoUnit.SECONDS), categoryField, randomUser(), - null + null, + TestHelpers.randomImputationOption() ); } @@ -1274,7 +1295,7 @@ public static ADTask randomAdTask( executionEndTime = executionEndTime == null ? null : executionEndTime.truncatedTo(ChronoUnit.SECONDS); Entity entity = null; if (ADTaskType.HISTORICAL_HC_ENTITY == adTaskType) { - List categoryField = detector.getCategoryField(); + List categoryField = detector.getCategoryFields(); if (categoryField != null) { if (categoryField.size() == 1) { entity = Entity.createSingleAttributeEntity(categoryField.get(0), randomAlphaOfLength(5)); @@ -1377,12 +1398,12 @@ public static ADTask randomAdTask( executionEndTime = executionEndTime == null ? null : executionEndTime.truncatedTo(ChronoUnit.SECONDS); Entity entity = null; if (detector != null) { - if (detector.isMultiCategoryDetector()) { + if (detector.hasMultipleCategories()) { Map attrMap = new HashMap<>(); - detector.getCategoryField().stream().forEach(f -> attrMap.put(f, randomAlphaOfLength(5))); + detector.getCategoryFields().stream().forEach(f -> attrMap.put(f, randomAlphaOfLength(5))); entity = Entity.createEntityByReordering(attrMap); - } else if (detector.isMultientityDetector()) { - entity = Entity.createEntityByReordering(ImmutableMap.of(detector.getCategoryField().get(0), randomAlphaOfLength(5))); + } else if (detector.isHighCardinality()) { + entity = Entity.createEntityByReordering(ImmutableMap.of(detector.getCategoryFields().get(0), randomAlphaOfLength(5))); } } String taskType = entity == null ? 
ADTaskType.HISTORICAL_SINGLE_ENTITY.name() : ADTaskType.HISTORICAL_HC_ENTITY.name(); @@ -1547,4 +1568,205 @@ public static ClusterState createClusterState() { ); return clusterState; } + + public static ImputationOption randomImputationOption() { + double[] defaultFill = DoubleStream.generate(OpenSearchTestCase::randomDouble).limit(10).toArray(); + ImputationOption fixedValue = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill), false); + ImputationOption linear = new ImputationOption(ImputationMethod.LINEAR, Optional.of(defaultFill), false); + ImputationOption linearIntSensitive = new ImputationOption(ImputationMethod.LINEAR, Optional.of(defaultFill), true); + ImputationOption zero = new ImputationOption(ImputationMethod.ZERO); + ImputationOption previous = new ImputationOption(ImputationMethod.PREVIOUS); + + List options = List.of(fixedValue, linear, linearIntSensitive, zero, previous); + + // Select a random option + int randomIndex = Randomness.get().nextInt(options.size()); + return options.get(randomIndex); + } + + public static class ForecasterBuilder { + String forecasterId; + Long version; + String name; + String description; + String timeField; + List indices; + List features; + QueryBuilder filterQuery; + TimeConfiguration forecastInterval; + TimeConfiguration windowDelay; + Integer shingleSize; + Map uiMetadata; + Integer schemaVersion; + Instant lastUpdateTime; + List categoryFields; + User user; + String resultIndex; + Integer horizon; + ImputationOption imputationOption; + + ForecasterBuilder() throws IOException { + forecasterId = randomAlphaOfLength(10); + version = randomLong(); + name = randomAlphaOfLength(10); + description = randomAlphaOfLength(20); + timeField = randomAlphaOfLength(5); + indices = ImmutableList.of(randomAlphaOfLength(10)); + features = ImmutableList.of(randomFeature()); + filterQuery = randomQuery(); + forecastInterval = randomIntervalTimeConfiguration(); + windowDelay = 
randomIntervalTimeConfiguration(); + shingleSize = randomIntBetween(1, 20); + uiMetadata = ImmutableMap.of(randomAlphaOfLength(5), randomAlphaOfLength(10)); + schemaVersion = randomInt(); + lastUpdateTime = Instant.now().truncatedTo(ChronoUnit.SECONDS); + categoryFields = ImmutableList.of(randomAlphaOfLength(5)); + user = randomUser(); + resultIndex = null; + horizon = randomIntBetween(1, 20); + imputationOption = randomImputationOption(); + } + + public static ForecasterBuilder newInstance() throws IOException { + return new ForecasterBuilder(); + } + + public ForecasterBuilder setConfigId(String configId) { + this.forecasterId = configId; + return this; + } + + public ForecasterBuilder setVersion(Long version) { + this.version = version; + return this; + } + + public ForecasterBuilder setName(String name) { + this.name = name; + return this; + } + + public ForecasterBuilder setDescription(String description) { + this.description = description; + return this; + } + + public ForecasterBuilder setTimeField(String timeField) { + this.timeField = timeField; + return this; + } + + public ForecasterBuilder setIndices(List indices) { + this.indices = indices; + return this; + } + + public ForecasterBuilder setFeatureAttributes(List featureAttributes) { + this.features = featureAttributes; + return this; + } + + public ForecasterBuilder setFilterQuery(QueryBuilder filterQuery) { + this.filterQuery = filterQuery; + return this; + } + + public ForecasterBuilder setDetectionInterval(TimeConfiguration forecastInterval) { + this.forecastInterval = forecastInterval; + return this; + } + + public ForecasterBuilder setWindowDelay(TimeConfiguration windowDelay) { + this.windowDelay = windowDelay; + return this; + } + + public ForecasterBuilder setShingleSize(Integer shingleSize) { + this.shingleSize = shingleSize; + return this; + } + + public ForecasterBuilder setUiMetadata(Map uiMetadata) { + this.uiMetadata = uiMetadata; + return this; + } + + public ForecasterBuilder 
setSchemaVersion(Integer schemaVersion) { + this.schemaVersion = schemaVersion; + return this; + } + + public ForecasterBuilder setLastUpdateTime(Instant lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + return this; + } + + public ForecasterBuilder setCategoryFields(List categoryFields) { + this.categoryFields = categoryFields; + return this; + } + + public ForecasterBuilder setUser(User user) { + this.user = user; + return this; + } + + public ForecasterBuilder setCustomResultIndex(String resultIndex) { + this.resultIndex = resultIndex; + return this; + } + + public ForecasterBuilder setNullImputationOption() { + this.imputationOption = null; + return this; + } + + public Forecaster build() { + return new Forecaster( + forecasterId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + forecastInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + categoryFields, + user, + resultIndex, + horizon, + imputationOption + ); + } + } + + public static Forecaster randomForecaster() throws IOException { + return new Forecaster( + randomAlphaOfLength(10), + randomLong(), + randomAlphaOfLength(10), + randomAlphaOfLength(20), + randomAlphaOfLength(5), + ImmutableList.of(randomAlphaOfLength(10)), + ImmutableList.of(randomFeature()), + randomQuery(), + randomIntervalTimeConfiguration(), + randomIntervalTimeConfiguration(), + randomIntBetween(1, 20), + ImmutableMap.of(randomAlphaOfLength(5), randomAlphaOfLength(10)), + randomInt(), + Instant.now().truncatedTo(ChronoUnit.SECONDS), + ImmutableList.of(randomAlphaOfLength(5)), + randomUser(), + null, + randomIntBetween(1, 20), + randomImputationOption() + ); + } } diff --git a/src/test/java/org/opensearch/ad/common/exception/ADValidationExceptionTests.java b/src/test/java/org/opensearch/timeseries/common/exception/ValidationExceptionTests.java similarity index 75% rename from 
src/test/java/org/opensearch/ad/common/exception/ADValidationExceptionTests.java rename to src/test/java/org/opensearch/timeseries/common/exception/ValidationExceptionTests.java index c97eb626c..bfcd5ad7a 100644 --- a/src/test/java/org/opensearch/ad/common/exception/ADValidationExceptionTests.java +++ b/src/test/java/org/opensearch/timeseries/common/exception/ValidationExceptionTests.java @@ -9,15 +9,15 @@ * GitHub history for details. */ -package org.opensearch.ad.common.exception; +package org.opensearch.timeseries.common.exception; +import org.opensearch.forecast.constant.ForecastCommonName; import org.opensearch.test.OpenSearchTestCase; -import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.ValidationAspect; import org.opensearch.timeseries.model.ValidationIssueType; -public class ADValidationExceptionTests extends OpenSearchTestCase { +public class ValidationExceptionTests extends OpenSearchTestCase { public void testConstructorDetector() { String message = randomAlphaOfLength(5); ValidationException exception = new ValidationException(message, ValidationIssueType.NAME, ValidationAspect.DETECTOR); @@ -41,4 +41,11 @@ public void testToString() { String exceptionStringNoType = exceptionNoType.toString(); logger.info("exception string no type: " + exceptionStringNoType); } + + public void testForecasterAspect() { + String message = randomAlphaOfLength(5); + ValidationException exception = new ValidationException(message, ValidationIssueType.CATEGORY, ValidationAspect.FORECASTER); + assertEquals(ValidationIssueType.CATEGORY, exception.getType()); + assertEquals(ValidationAspect.getName(ForecastCommonName.FORECASTER_ASPECT), exception.getAspect()); + } } diff --git a/src/test/java/org/opensearch/timeseries/dataprocessor/ImputationOptionTests.java b/src/test/java/org/opensearch/timeseries/dataprocessor/ImputationOptionTests.java index a0df85ead..f1cb8b36e 
100644 --- a/src/test/java/org/opensearch/timeseries/dataprocessor/ImputationOptionTests.java +++ b/src/test/java/org/opensearch/timeseries/dataprocessor/ImputationOptionTests.java @@ -26,7 +26,7 @@ public void testStreamInputAndOutput() throws IOException { ImputationMethod method = ImputationMethod.PREVIOUS; double[] defaultFill = { 1.0, 2.0, 3.0 }; - ImputationOption option = new ImputationOption(method, Optional.of(defaultFill)); + ImputationOption option = new ImputationOption(method, Optional.of(defaultFill), false); // Write the ImputationOption to the StreamOutput. BytesStreamOutput out = new BytesStreamOutput(); @@ -44,9 +44,9 @@ public void testStreamInputAndOutput() throws IOException { public void testToXContent() throws IOException { double[] defaultFill = { 1.0, 2.0, 3.0 }; - ImputationOption imputationOption = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill)); + ImputationOption imputationOption = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill), false); - String xContent = "{" + "\"method\":\"FIXED_VALUES\"," + "\"defaultFill\":[1.0,2.0,3.0]" + "}"; + String xContent = "{" + "\"method\":\"FIXED_VALUES\"," + "\"defaultFill\":[1.0,2.0,3.0],\"integerSensitive\":false" + "}"; XContentBuilder builder = imputationOption.toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS); String actualJson = BytesReference.bytes(builder).utf8ToString(); @@ -55,10 +55,10 @@ public void testToXContent() throws IOException { } public void testParse() throws IOException { - String xContent = "{" + "\"method\":\"FIXED_VALUES\"," + "\"defaultFill\":[1.0,2.0,3.0]" + "}"; + String xContent = "{" + "\"method\":\"FIXED_VALUES\"," + "\"defaultFill\":[1.0,2.0,3.0],\"integerSensitive\":false" + "}"; double[] defaultFill = { 1.0, 2.0, 3.0 }; - ImputationOption imputationOption = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill)); + ImputationOption imputationOption = new 
ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill), false); try ( XContentParser parser = JsonXContent.jsonXContent @@ -86,9 +86,9 @@ public void testEqualsAndHashCode() { double[] defaultFill1 = { 1.0, 2.0, 3.0 }; double[] defaultFill2 = { 4.0, 5.0, 6.0 }; - ImputationOption option1 = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill1)); - ImputationOption option2 = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill1)); - ImputationOption option3 = new ImputationOption(ImputationMethod.LINEAR, Optional.of(defaultFill2)); + ImputationOption option1 = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill1), false); + ImputationOption option2 = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill1), false); + ImputationOption option3 = new ImputationOption(ImputationMethod.LINEAR, Optional.of(defaultFill2), false); // Test reflexivity assertTrue(option1.equals(option1)); @@ -98,7 +98,7 @@ public void testEqualsAndHashCode() { assertTrue(option2.equals(option1)); // Test transitivity - ImputationOption option2Clone = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill1)); + ImputationOption option2Clone = new ImputationOption(ImputationMethod.FIXED_VALUES, Optional.of(defaultFill1), false); assertTrue(option1.equals(option2)); assertTrue(option2.equals(option2Clone)); assertTrue(option1.equals(option2Clone)); diff --git a/src/test/java/test/org/opensearch/ad/util/FakeNode.java b/src/test/java/test/org/opensearch/ad/util/FakeNode.java index 1af160f91..672072884 100644 --- a/src/test/java/test/org/opensearch/ad/util/FakeNode.java +++ b/src/test/java/test/org/opensearch/ad/util/FakeNode.java @@ -37,7 +37,6 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.io.stream.NamedWriteableRegistry; -import org.opensearch.common.lease.Releasable; import 
org.opensearch.common.network.NetworkService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; @@ -45,6 +44,7 @@ import org.opensearch.common.transport.BoundTransportAddress; import org.opensearch.common.transport.TransportAddress; import org.opensearch.common.util.PageCacheRecycler; +import org.opensearch.core.common.lease.Releasable; import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.tasks.TaskManager; import org.opensearch.tasks.TaskResourceTrackingService; diff --git a/src/test/java/test/org/opensearch/ad/util/MLUtil.java b/src/test/java/test/org/opensearch/ad/util/MLUtil.java index 870367378..988c14526 100644 --- a/src/test/java/test/org/opensearch/ad/util/MLUtil.java +++ b/src/test/java/test/org/opensearch/ad/util/MLUtil.java @@ -61,7 +61,7 @@ public static Queue createQueueSamples(int size) { public static ModelState randomModelState(RandomModelStateConfig config) { boolean fullModel = config.getFullModel() != null && config.getFullModel().booleanValue() ? true : false; float priority = config.getPriority() != null ? config.getPriority() : random.nextFloat(); - String detectorId = config.getDetectorId() != null ? config.getDetectorId() : randomString(15); + String detectorId = config.getId() != null ? config.getId() : randomString(15); int sampleSize = config.getSampleSize() != null ? config.getSampleSize() : random.nextInt(minSampleSize); Clock clock = config.getClock() != null ? 
config.getClock() : Clock.systemUTC(); diff --git a/src/test/java/test/org/opensearch/ad/util/RandomModelStateConfig.java b/src/test/java/test/org/opensearch/ad/util/RandomModelStateConfig.java index 757ba1bc5..25a2da1bd 100644 --- a/src/test/java/test/org/opensearch/ad/util/RandomModelStateConfig.java +++ b/src/test/java/test/org/opensearch/ad/util/RandomModelStateConfig.java @@ -38,7 +38,7 @@ public Float getPriority() { return priority; } - public String getDetectorId() { + public String getId() { return detectorId; }