added integration tests for anomaly detector
AlxEnashi committed Apr 12, 2024
1 parent 759f7ea commit a5734e2
Showing 5 changed files with 166 additions and 1 deletion.
14 changes: 14 additions & 0 deletions .github/workflows/lc_classification_step.yaml
@@ -30,6 +30,8 @@ jobs:
TEST_MESSI_FEATURE_QUANTILES_PATH: "${{ secrets.TEST_MESSI_FEATURE_QUANTILES_PATH }}"
TEST_TORETTO_MODEL_PATH: "${{ secrets.TEST_TORETTO_MODEL_PATH }}"
TEST_MLP_MODEL_PATH: "${{ secrets.TEST_MLP_MODEL_PATH }}"
TEST_ANOMALY_QUANTILES_PATH: "${{ secrets.TEST_ANOMALY_QUANTILES_PATH }}"
TEST_ANOMALY_MODEL_PATH: "${{ secrets.TEST_ANOMALY_MODEL_PATH }}"
lc_classification_step_integration:
uses: ./.github/workflows/poetry-tests-template.yaml
with:
@@ -47,6 +49,8 @@ jobs:
TEST_MESSI_FEATURE_QUANTILES_PATH: "${{ secrets.TEST_MESSI_FEATURE_QUANTILES_PATH }}"
TEST_TORETTO_MODEL_PATH: "${{ secrets.TEST_TORETTO_MODEL_PATH }}"
TEST_MLP_MODEL_PATH: "${{ secrets.TEST_MLP_MODEL_PATH }}"
TEST_ANOMALY_QUANTILES_PATH: "${{ secrets.TEST_ANOMALY_QUANTILES_PATH }}"
TEST_ANOMALY_MODEL_PATH: "${{ secrets.TEST_ANOMALY_MODEL_PATH }}"

build-lc-classification-balto-dagger:
uses: ./.github/workflows/template_build_with_dagger.yaml
@@ -87,3 +91,13 @@ jobs:
submodules: true
secrets:
GH_TOKEN: ${{ secrets.ADMIN_TOKEN }}

build-lc-classification-anomaly-dagger:
uses: ./.github/workflows/template_build_with_dagger.yaml
with:
ref: ${{ github.ref }}
stage: staging
extra-args: lc_classification_step_anomaly --build-args=model:anomaly --package-dir=lc_classification_step --dry-run
submodules: true
secrets:
GH_TOKEN: ${{ secrets.ADMIN_TOKEN }}
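The two new TEST_ANOMALY_* secrets are exported to the test jobs as environment variables and then forwarded by the integration tests (see test_step_anomaly.py below) to the names the step itself reads. A minimal sketch of that hand-off, using only the standard library; the helper name is hypothetical and the mapping is copied from the test below:

```python
import os


def anomaly_test_env() -> dict:
    """Hypothetical helper: translate the CI-provided TEST_ANOMALY_* variables
    into the variables consumed by models_settings.anomaly_params()."""
    return {
        "MODEL_PATH": os.getenv("TEST_ANOMALY_MODEL_PATH"),
        "FEATURE_QUANTILES_PATH": os.getenv("TEST_ANOMALY_QUANTILES_PATH"),
        "MAPPER_CLASS": "alerce_classifiers.anomaly.mapper.AnomalyMapper",
    }
```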
3 changes: 2 additions & 1 deletion .github/workflows/staging_ci.yaml
@@ -68,7 +68,7 @@ jobs:
stage: staging
extra-args: feature_step
secrets:
GH_TOKEN: ${{ secrets.ADMIN_TOKEN }}
GH_TOKEN: ${{ secrets.ADMIN_TOKEN }}

build-lc-classification-balto-dagger:
needs: update-packages-dagger
@@ -300,6 +300,7 @@ jobs:
# feature step frozen at 23.x version
poetry run python main.py deploy add-package feature-step-23.x --values=feature_step-helm-values:28 --chart-folder=feature_step --chart=feature-step
poetry run python main.py deploy add-package lc-classifier-step-ztf --chart=lc-classifier-step --values=lc_classification_step-helm-values --chart-folder=lc_classification_step
poetry run python main.py deploy add-package lc-classification-step-anomaly --chart=lc-classifier-step --values=anomaly-detector-step-helm-values --chart-folder=lc_classification_step
poetry run python main.py deploy add-package lightcurve-step --values=lightcurve-step-helm-values
poetry run python main.py deploy add-package logstash --values=logstash-helm-values --chart=logstash --chart-folder=logstash
poetry run python main.py deploy add-package magstats-step --values=magstats_step-helm-values --chart-folder=magstats_step
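The new add-package entry reuses the existing lc-classifier-step chart and chart folder; only the package name and the helm values file differ from the ZTF entry above it. Restated as data for easier comparison (names copied verbatim from the two commands; this dict is purely illustrative and not part of the deploy tool):

```python
# Illustrative comparison of the two lc-classifier deployments registered above.
# Keys mirror the CLI flags; values are copied verbatim from the commands.
LC_CLASSIFIER_PACKAGES = {
    "lc-classifier-step-ztf": {
        "chart": "lc-classifier-step",
        "values": "lc_classification_step-helm-values",
        "chart_folder": "lc_classification_step",
    },
    "lc-classification-step-anomaly": {
        "chart": "lc-classifier-step",
        "values": "anomaly-detector-step-helm-values",
        "chart_folder": "lc_classification_step",
    },
}
```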
13 changes: 13 additions & 0 deletions lc_classification_step/models_settings.py
@@ -55,6 +55,17 @@ def ztf_params(model_class: str):
"NAME": "lc_classifier",
}

def anomaly_params(model_class: str):
return {
"PARAMS": {
"model_path": os.getenv("MODEL_PATH"),
"feature_quantiles_path": os.getenv("FEATURE_QUANTILES_PATH"),
"mapper": os.getenv("MAPPER_CLASS"),
},
"CLASS": model_class,
"NAME": model_class.split(".")[-1],
}

def configurator(model_class: str):
if model_class.endswith("BaltoClassifier"):
@@ -63,6 +74,8 @@ def configurator(model_class: str):
return messi_params(model_class)
if model_class.endswith("RandomForestFeaturesClassifier"):
return toretto_params(model_class)
if model_class.endswith("AnomalyDetector"):
return anomaly_params(model_class)
if model_class.endswith(
"RandomForestFeaturesHeaderClassifier"
) or model_class.endswith("TinkyWinkyClassifier"):
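To make the new dispatch branch concrete, here is a minimal sketch of what configurator returns for the anomaly model, assuming models_settings is importable from the working directory and the three environment variables are set (the paths below are placeholders; in the tests they come from the TEST_ANOMALY_* secrets):

```python
import os

# Placeholder values purely for illustration.
os.environ["MODEL_PATH"] = "/models/anomaly/model.pt"
os.environ["FEATURE_QUANTILES_PATH"] = "/models/anomaly/quantiles"
os.environ["MAPPER_CLASS"] = "alerce_classifiers.anomaly.mapper.AnomalyMapper"

from models_settings import configurator

config = configurator("alerce_classifiers.anomaly.model.AnomalyDetector")
assert config["NAME"] == "AnomalyDetector"  # last component of the dotted class path
assert config["CLASS"].endswith("AnomalyDetector")
assert config["PARAMS"]["mapper"] == os.environ["MAPPER_CLASS"]
```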
47 changes: 47 additions & 0 deletions lc_classification_step/tests/integration/conftest.py
@@ -165,6 +165,53 @@ def set_env_variables():
yield set_env_variables
os.environ = env_copy

@pytest.fixture
def env_variables_anomaly():
env_copy = os.environ.copy()

def set_env_variables(
model: str,
model_class: str,
extra_env_vars: dict = {},
):
random_string = uuid.uuid4().hex
step_schema_path = pathlib.Path(
pathlib.Path(__file__).parent.parent.parent.parent,
"schemas/lc_classification_step",
)
env_variables_dict = {
"PRODUCER_SCHEMA_PATH": str(
step_schema_path / "output_elasticc.avsc"
),
"METRICS_SCHEMA_PATH": str(step_schema_path / "metrics.json"),
"SCRIBE_SCHEMA_PATH": str(
step_schema_path / "../scribe_step/scribe.avsc"
),
"CONSUMER_SERVER": "localhost:9092",
"CONSUMER_TOPICS": "features_anomaly",
"CONSUMER_GROUP_ID": random_string,
"PRODUCER_SERVER": "localhost:9092",
"PRODUCER_TOPIC_FORMAT": f"lc_classifier_{model}%s",
"PRODUCER_DATE_FORMAT": "%Y%m%d",
"PRODUCER_CHANGE_HOUR": "23",
"PRODUCER_RETENTION_DAYS": "1",
"SCRIBE_SERVER": "localhost:9092",
"METRICS_HOST": "localhost:9092",
"METRICS_TOPIC": "metrics",
"SCRIBE_TOPIC": "w_object",
"CONSUME_MESSAGES": "5",
"ENABLE_PARTITION_EOF": "True",
"STREAM": "elasticc",
"MODEL_CLASS": model_class,
"SCRIBE_PARSER_CLASS": "lc_classification.core.parsers.scribe_parser.ScribeParser",
"STEP_PARSER_CLASS": "lc_classification.core.parsers.anomaly_parser.AnomalyParser",
}
env_variables_dict.update(extra_env_vars)
for key, value in env_variables_dict.items():
os.environ[key] = value

yield set_env_variables
os.environ = env_copy

@pytest.fixture
def env_variables_elasticc():
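One note on the producer settings in the fixture above: PRODUCER_TOPIC_FORMAT carries a %s placeholder, which apf's producer is expected to fill with the current date rendered via PRODUCER_DATE_FORMAT (the exact call site lives in apf, not in this diff). A quick sketch of the resulting topic name for the anomaly model:

```python
from datetime import datetime, timezone

topic_format = "lc_classifier_anomaly%s"  # PRODUCER_TOPIC_FORMAT with model="anomaly"
date_format = "%Y%m%d"                    # PRODUCER_DATE_FORMAT

topic = topic_format % datetime.now(timezone.utc).strftime(date_format)
print(topic)  # e.g. "lc_classifier_anomaly20240412"
```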
90 changes: 90 additions & 0 deletions lc_classification_step/tests/integration/test_step_anomaly.py
@@ -0,0 +1,90 @@
import json
import os
from typing import Callable

import pytest
from apf.consumers import KafkaConsumer

from lc_classification.core.step import LateClassifier
from tests.test_commons import (
assert_command_is_correct,
assert_ztf_object_is_correct,
)


@pytest.mark.ztf
def test_step_anomaly_result(
kafka_service,
produce_messages,
env_variables_anomaly,
kafka_consumer: Callable[[str], KafkaConsumer],
scribe_consumer: Callable[[], KafkaConsumer],
):
produce_messages("features_anomaly")
env_variables_anomaly(
"anomaly",
"alerce_classifiers.anomaly.model.AnomalyDetector",
{
"MODEL_PATH": os.getenv("TEST_ANOMALY_MODEL_PATH"),
"FEATURE_QUANTILES_PATH": os.getenv(
"TEST_ANOMALY_QUANTILES_PATH"
),
"MAPPER_CLASS": "alerce_classifiers.anomaly.mapper.AnomalyMapper",
},
)

from settings import config

kconsumer = kafka_consumer("anomaly")
sconsumer = scribe_consumer()

step = LateClassifier(config=config())
step.start()

for message in kconsumer.consume():
assert_ztf_object_is_correct(message)
kconsumer.commit()

for message in sconsumer.consume():
command = json.loads(message["payload"])
assert_command_is_correct(command)
sconsumer.commit()


@pytest.mark.ztf
def test_step_anomaly_no_features_result(
kafka_service,
produce_messages,
env_variables_anomaly,
kafka_consumer: Callable[[str], KafkaConsumer],
scribe_consumer: Callable[[], KafkaConsumer],
):
produce_messages("features_anomaly", force_missing_features=True)
env_variables_anomaly(
"anomaly",
"alerce_classifiers.anomaly.model.AnomalyDetector",
{
"MODEL_PATH": os.getenv("TEST_ANOMALY_MODEL_PATH"),
"FEATURE_QUANTILES_PATH": os.getenv(
"TEST_ANOMALY_QUANTILES_PATH"
),
"MAPPER_CLASS": "alerce_classifiers.anomaly.mapper.AnomalyMapper",
},
)

from settings import config

kconsumer = kafka_consumer("anomaly")
sconsumer = scribe_consumer()

step = LateClassifier(config=config())
step.start()

for message in kconsumer.consume():
assert_ztf_object_is_correct(message)
kconsumer.commit()

for message in sconsumer.consume():
command = json.loads(message["payload"])
assert_command_is_correct(command)
sconsumer.commit()
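
To reproduce these tests outside CI, a rough sketch of a local run, assuming a Kafka broker is reachable on localhost:9092 (as the conftest fixtures expect), the anomaly model artifacts are available locally, and the script is executed from the lc_classification_step package directory; both paths are placeholders:

```python
import os
import sys

import pytest

# Placeholders: point these at real anomaly-detector artifacts.
os.environ["TEST_ANOMALY_MODEL_PATH"] = "/path/to/anomaly/model"
os.environ["TEST_ANOMALY_QUANTILES_PATH"] = "/path/to/anomaly/quantiles"

sys.exit(pytest.main(["-m", "ztf", "tests/integration/test_step_anomaly.py"]))
```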
