Skip to content

Commit

Permalink
Release/v0.4.38 (#191)
Browse files Browse the repository at this point in the history
- default base docker image set to CALDP_20221010_CAL_final
- default crds update to hst_1038.pmap
- pull cert chain from ami where the image is built
- significant unit test coverage additions
- node install bugfix in ami-rotation userdata

Co-authored-by: Brian Hayden <[email protected]>
Co-authored-by: jshihstsci <[email protected]>
  • Loading branch information
3 people authored Oct 11, 2022
1 parent c9a6ce6 commit 2bb0f5d
Show file tree
Hide file tree
Showing 34 changed files with 2,353 additions and 65 deletions.
26 changes: 23 additions & 3 deletions .github/workflows/pytest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,28 @@ jobs:
uses: actions/checkout@v2

- name: install requirements
run: pip install .[dev]
run: |
pip install .[dev]
pip install numpy sklearn tensorflow --upgrade-strategy only-if-needed
- name: run pytest code coverage check
run: pytest --cov=lambda --cov-fail-under 75 -rP
run: |
pytest \
--verbose \
--cov=calcloud \
--cov=lambda/AmiRotation \
--cov=lambda/batch_events \
--cov=lambda/blackboard \
--cov=lambda/broadcast \
--cov=lambda/JobClean \
--cov=lambda/JobDelete \
--cov=lambda/JobPredict \
--cov=lambda/JobRescue \
--cov=lambda/ModelIngest \
--cov=lambda/refreshCacheLogs \
--cov=lambda/refreshCacheSubmit \
--cov=lambda/s3_trigger \
--cov-fail-under 30 \
pytest:
name: pytest
Expand All @@ -36,7 +54,9 @@ jobs:
uses: actions/checkout@v2

- name: install requirements
run: pip install .[dev]
run: |
pip install .[dev]
pip install numpy sklearn tensorflow --upgrade-strategy only-if-needed
- name: run pytest
run: pytest -rP
1 change: 1 addition & 0 deletions calcloud/hst.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,4 +71,5 @@ def get_output_path(output_uri, ipppssoot):
>>> get_output_path("s3://temp/batch-2020-02-13T10:33:00", "IC0B02020")
's3://temp/batch-2020-02-13T10:33:00/wfc3/IC0B02020'
"""
# This function does not appear to be used anywhere, may have been deprecated
return output_uri + "/" + get_instrument(ipppssoot) + "/" + ipppssoot
3 changes: 2 additions & 1 deletion calcloud/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -507,7 +507,8 @@ def broadcast(self, type, ipppssoots, payload=None):
assert type in MESSAGE_TYPES
assert type != "broadcast" # don't broadcast broadcasts....
assert isinstance(ipppssoots, list)
assert not len(ipppssoots) or isinstance(ipppssoots[0], str)
assert len(ipppssoots)
assert isinstance(ipppssoots[0], str)
assert "all" not in ipppssoots # don't broadcast message tails of "all"
assert len(ipppssoots) < MAX_BROADCAST_MSGS
msg = f"broadcast-{self.get_id()}"
Expand Down
2 changes: 1 addition & 1 deletion calcloud/plan.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ def invoke_lambda_predict(ipppssoot, output_bucket):
Payload=json.dumps(inputParams),
)
predictions = json.load(response["Payload"])
print(f"Predictions for {ipppssoot}: \n {predictions}")
log.info(f"Predictions for {ipppssoot}: {predictions}")
# defaults: db_clock=20 minutes, wc_std=5
db_clock, wc_std = query_ddb(ipppssoot)
clockTime = predictions["clockTime"] * (1 + wc_std)
Expand Down
6 changes: 5 additions & 1 deletion changelog.md
Original file line number Diff line number Diff line change
@@ -1 +1,5 @@
- hotfix for protobuf pinned to 3.20.1 to deal with dependency-related error in modeling lambda/training images
- default base docker image set to CALDP_20221010_CAL_final
- default crds update to hst_1038.pmap
- pull cert chain from ami where the image is built
- significant unit test coverage additions
- node install bugfix in ami-rotation userdata
2 changes: 1 addition & 1 deletion lambda/JobDelete/delete_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def lambda_handler(event, context):
comm.messages.put(f"terminated-{ipst}", "cancel lambda " + bucket_name)
try:
metadata = comm.xdata.get(ipst)
except comm.xdata.client.exceptions.NoSuchKeyError:
except comm.xdata.client.exceptions.NoSuchKey:
metadata = dict(job_id=job_id, cancel_type="job_id")
metadata["terminated"] = True
comm.xdata.put(ipst, metadata)
Expand Down
7 changes: 6 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,2 +1,7 @@
[tool.black]
line-length = 120
line-length = 120

[tool.pytest.ini_options]
pythonpath = [
"."
]
4 changes: 3 additions & 1 deletion terraform/README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
NOTE: This README is quite out of date with the current terraform scripts.

This set of terraform scripts will stand up an AWS Batch processing environment to process *HST* data using a docker image.

### Prerequisites
Expand All @@ -22,4 +24,4 @@ This set of terraform scripts will stand up an AWS Batch processing environment
- `terraform plan -var-file local.tfvars`
- `terraform apply -var-file template.tfvars`

Now you should have a functioning AWS Batch environment that can run the caldp docker image. To destroy it just run `terraform destroy -var-file local.tfvars`
Now you should have a functioning AWS Batch environment that can run the caldp docker image. To destroy it just run `terraform destroy -var-file local.tfvars`
1 change: 1 addition & 0 deletions terraform/deploy_docker_builds.sh
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ fi

# caldp image
cd ${CALDP_BUILD_DIR}
cp /etc/ssl/certs/ca-bundle.crt tls-ca-bundle.pem # copy the cert from CI node AMI and replace the one hard coded in the repo
set -o pipefail && docker build -f Dockerfile -t ${CALDP_DOCKER_IMAGE} --build-arg CAL_BASE_IMAGE="${CAL_BASE_IMAGE}" .
caldp_docker_build_status=$?
if [[ $caldp_docker_build_status -ne 0 ]]; then
Expand Down
10 changes: 5 additions & 5 deletions terraform/deploy_vars.sh
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
#! /bin/bash -xu

export CALCLOUD_VER="v0.4.37"
export CALDP_VER="v0.2.19"
export CAL_BASE_IMAGE="stsci/hst-pipeline:CALDP_20220527_CAL_final"
export CALCLOUD_VER="v0.4.38"
export CALDP_VER="v0.2.20"
export CAL_BASE_IMAGE="stsci/hst-pipeline:CALDP_20221010_CAL_final"

export BASE_IMAGE_TAG=`cut -d ":" -f2- <<< ${CAL_BASE_IMAGE} `

Expand All @@ -11,7 +11,7 @@ export COMMON_IMAGE_TAG="CALCLOUD_${CALCLOUD_VER}-CALDP_${CALDP_VER}-BASE_${BASE
# i.e. CALCLOUD_BUILD_DIR="$HOME/deployer/calcloud"
# these can be set as environment variables before running to avoid changing the script directly
# (and avoid accidentally committing a custom path to the repo...)
export CALCLOUD_BUILD_DIR=${CALCLOUD_BUILD_DIR:-""}
export CALCLOUD_BUILD_DIR=${CALCLOUD_BUILD_DIR:-""}
export CALDP_BUILD_DIR=${CALDP_BUILD_DIR:-""}

export TMP_INSTALL_DIR="/tmp/calcloud_install"
Expand Down Expand Up @@ -55,4 +55,4 @@ CSYS_VER=${CAL_BASE_IMAGE##*:}
CSYS_VER=`echo $CSYS_VER | cut -f1,2 -d'_'` #split by underscores, keep the first two
export CSYS_VER=`echo $CSYS_VER | awk '{print tolower($0)}'`

# get repo_url here for the central ecr repo
# get repo_url here for the central ecr repo
8 changes: 4 additions & 4 deletions terraform/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -103,10 +103,10 @@ variable crds_context {
description = "the crds context to be the default for the environment"
type = map(string)
default = {
"-sb" = "hst_0866.pmap"
"-dev" = "hst_0866.pmap"
"-test" = "hst_1015.pmap"
"-ops" = "hst_1015.pmap"
"-sb" = "hst_0962.pmap"
"-dev" = "hst_0962.pmap"
"-test" = "hst_1038.pmap"
"-ops" = "hst_1038.pmap"
}
}

Expand Down
1 change: 1 addition & 0 deletions tests/artifacts/events/generic-message-event.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"Records": [{"eventVersion": "2.1", "eventSource": "aws:s3", "awsRegion": "us-east-1", "eventTime": "2022-07-25T15:51:12.813Z", "eventName": "ObjectCreated:Put", "userIdentity": {"principalId": "xxxxxxxxxxx"}, "requestParameters": {"sourceIPAddress": "xxxxxxxxxxx"}, "responseElements": {"x-amz-request-id": "xxxxxxxxxxx", "x-amz-id-2": "xxxxxxxxxxx"}, "s3": {"s3SchemaVersion": "1.0", "configurationId": "xxxxxxxxxxx", "bucket": {"name": "calcloud-processing-moto", "ownerIdentity": {"principalId": "xxxxxxxxxxx"}, "arn": "arn:aws:s3:::calcloud-processing-moto"}, "object": {"key": "messages/clean-ipppssoot", "size": 0, "eTag": "xxxxxxxxxxx", "sequencer": "xxxxxxxxxxx"}}}]}
26 changes: 26 additions & 0 deletions tests/artifacts/events/generic-message-event.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
Records:
- awsRegion: us-east-1
eventName: ObjectCreated:Put
eventSource: aws:s3
eventTime: '2022-07-25T15:51:12.813Z'
eventVersion: '2.1'
requestParameters:
sourceIPAddress: xxxxxxxxxxx
responseElements:
x-amz-id-2: xxxxxxxxxxx
x-amz-request-id: xxxxxxxxxxx
s3:
bucket:
arn: arn:aws:s3:::calcloud-processing-moto
name: calcloud-processing-moto
ownerIdentity:
principalId: xxxxxxxxxxx
configurationId: xxxxxxxxxxx
object:
eTag: xxxxxxxxxxx
key: messages/generic-ipppssoot
sequencer: xxxxxxxxxxx
size: 0
s3SchemaVersion: '1.0'
userIdentity:
principalId: xxxxxxxxxxx
83 changes: 83 additions & 0 deletions tests/common.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
"""This module exists because importing calcloud into conftest.py causes doctests to run and fail,
and I don't know why. But it doesn't happen with this module."""

import copy

from calcloud import io


def assert_empty_messages(comm):
    """Assert that the comm object's message store currently holds nothing."""
    assert not comm.messages.listl()


def setup_diverse_messages(comm, overrides=None):
    """Post one message of most types from io.MESSAGE_TYPES, skipping the
    "broadcast" and "clean" types, each keyed to a unique synthetic ipppssoot.

    note (bhayden): I tried putting this function into conftest
    but importing calcloud.io caused pytest to run all of the doctests
    and fail miserably.

    Parameters
    ----------
    comm : object
        Messaging interface; this helper uses ``comm.messages.put`` and
        ``comm.messages.listl``.
    overrides : dict, optional
        Payload stored with every message; defaults to an empty dict.
        A ``None`` sentinel is used instead of a mutable ``{}`` default,
        which would be a single dict shared across calls and could leak
        state between tests if any caller or consumer mutated it.

    Returns
    -------
    (ipppssoots, message_types) : (list of str, list of str)
        Parallel lists: ``message_types[i]`` was posted for ``ipppssoots[i]``.
    """
    if overrides is None:
        overrides = {}

    assert_empty_messages(comm)

    # we'll make a message of each type for a unique list of ipppssoots
    message_types = copy.copy(io.MESSAGE_TYPES)
    # ... except for broadcast and clean
    message_types.remove("broadcast")
    message_types.remove("clean")

    # insert the messages
    ipppssoots = []
    for i, m in enumerate(message_types):
        ipst = f"ipppss{i:02d}t"
        ipppssoots.append(ipst)
        comm.messages.put(f"{m}-{ipst}", payload=overrides)

    # read them back and assert they're there
    mess = comm.messages.listl()
    for i, m in enumerate(message_types):
        assert f"{m}-ipppss{i:02d}t" in mess

    return ipppssoots, message_types


def setup_ingest_messages(comm, overrides=None):
    """Add a few extra "ingested" messages on top of the diverse message set.

    Parameters
    ----------
    comm : object
        Messaging interface; see ``setup_diverse_messages``.
    overrides : dict, optional
        Payload stored with every message; defaults to an empty dict.
        ``None`` sentinel replaces the previous mutable ``{}`` default,
        which was shared between calls.

    Returns
    -------
    (ipppssoots, message_types) : (list of str, list of str)
        The diverse lists extended with three extra "ingested" entries.
    """
    # normalize here (not just downstream) so this function is safe even if
    # callees still use their own defaults
    if overrides is None:
        overrides = {}

    ipppssoots, message_types = setup_diverse_messages(comm, overrides)
    n = len(ipppssoots)

    # insert the extra ingested messages, continuing the synthetic numbering
    for i in range(3):
        ipst = f"ipppss{i + n:02d}t"
        comm.messages.put(f"ingested-{ipst}", payload=overrides)
        ipppssoots.append(ipst)
        message_types.append("ingested")

    # read all messages back and assert they match the lists
    mess = comm.messages.listl()
    for i, m in enumerate(message_types):
        assert f"{m}-ipppss{i:02d}t" in mess
    return ipppssoots, message_types


def setup_error_messages(comm, overrides=None):
    """Add a few extra "error" messages on top of the diverse message set.

    Parameters
    ----------
    comm : object
        Messaging interface; see ``setup_diverse_messages``.
    overrides : dict, optional
        Payload stored with every message; defaults to an empty dict.
        ``None`` sentinel replaces the previous mutable ``{}`` default,
        which was shared between calls.

    Returns
    -------
    (ipppssoots, message_types) : (list of str, list of str)
        The diverse lists extended with three extra "error" entries.

    Notes
    -----
    A leftover debug ``print`` (which also made an extra ``comm.messages.get``
    call purely for display) was removed; the read-back assertions below
    already verify the messages landed.
    """
    # normalize here (not just downstream) so this function is safe even if
    # callees still use their own defaults
    if overrides is None:
        overrides = {}

    ipppssoots, message_types = setup_diverse_messages(comm, overrides=overrides)
    n = len(ipppssoots)

    # insert the extra error messages, continuing the synthetic numbering
    for i in range(3):
        ipst = f"ipppss{i + n:02d}t"
        comm.messages.put(f"error-{ipst}", payload=overrides)
        ipppssoots.append(ipst)
        message_types.append("error")

    # read all messages back and assert they match the lists
    mess = comm.messages.listl()
    for i, m in enumerate(message_types):
        assert f"{m}-ipppss{i:02d}t" in mess
    return ipppssoots, message_types
Loading

0 comments on commit 2bb0f5d

Please sign in to comment.