Merge pull request #68 from punch-mission/replace-punchdata
remove remaining punchdata references
jmbhughes authored Aug 8, 2024
2 parents ab42909 + 3c23366 commit 2d1193f
Showing 11 changed files with 101 additions and 96 deletions.
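
The substance of the change: everywhere the pipeline previously built or passed around a punchbowl PUNCHData object, it now uses a plain ndcube NDCube. Below is a minimal before/after sketch of that construction pattern, mirroring the test fixtures touched in this PR (toy values for illustration, not code from the diff):

    import numpy as np
    from astropy.nddata import StdDevUncertainty
    from astropy.wcs import WCS
    from ndcube import NDCube
    from punchbowl.data import NormalizedMetadata

    # toy 2-D image with matching uncertainty and a bare-bones WCS
    data = np.random.random((50, 50))
    uncertainty = StdDevUncertainty(np.sqrt(np.abs(data)))
    wcs = WCS(naxis=2)
    wcs.wcs.ctype = "HPLN-ARC", "HPLT-ARC"
    meta = NormalizedMetadata({"LEVEL": 0})

    # before: PUNCHData(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)
    # after:  the generic NDCube container carries the same pieces
    cube = NDCube(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)
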
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -19,7 +19,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.11", "3.12"]
python-version: ["3.11", "3.12"]

steps:
- uses: actions/checkout@v3
2 changes: 1 addition & 1 deletion .github/workflows/docs.yaml
@@ -17,7 +17,7 @@ jobs:
- name: Set up Python 3.10
uses: actions/setup-python@v2
with:
python-version: "3.10"
python-version: "3.11"

- name: Install dependencies
run: |
68 changes: 0 additions & 68 deletions punchpipe/controlsegment/__init__.py
@@ -1,68 +0,0 @@
from datetime import datetime

import numpy as np
import pytest
from astropy.nddata import StdDevUncertainty
from astropy.wcs import WCS
from punchbowl.data import NormalizedMetadata, PUNCHData

from punchpipe.controlsegment.db import File
from punchpipe.controlsegment.util import match_data_with_file_db_entry


@pytest.fixture()
def sample_punchdata(shape=(50, 50), level=0):
data = np.random.random(shape)
uncertainty = StdDevUncertainty(np.sqrt(np.abs(data)))
wcs = WCS(naxis=2)
wcs.wcs.ctype = "HPLN-ARC", "HPLT-ARC"
wcs.wcs.cunit = "deg", "deg"
wcs.wcs.cdelt = 0.1, 0.1
wcs.wcs.crpix = 0, 0
wcs.wcs.crval = 1, 1
wcs.wcs.cname = "HPC lon", "HPC lat"

meta = NormalizedMetadata({"LEVEL": level})
return PUNCHData(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)


def test_match_data_with_file_db_entry_fails_on_empty_list(sample_punchdata):
file_db_entry_list = []
with pytest.raises(RuntimeError):
match_data_with_file_db_entry(sample_punchdata, file_db_entry_list)


def test_match_data_with_file_db_entry(sample_punchdata):
file_db_entry_list = [
File(
level=1,
file_type="XX",
observatory="Y",
file_version="0",
software_version="0",
date_created=datetime.now(),
date_obs=datetime.now(),
date_beg=datetime.now(),
date_end=datetime.now(),
polarization="ZZ",
state="created",
processing_flow=0,
),
File(
level=100,
file_type="XX",
observatory="Y",
file_version="0",
software_version="0",
date_created=datetime.now(),
date_obs=datetime.now(),
date_beg=datetime.now(),
date_end=datetime.now(),
polarization="ZZ",
state="created",
processing_flow=0,
),
]
output = match_data_with_file_db_entry(sample_punchdata, file_db_entry_list)
assert len(output) == 1
assert output == file_db_entry_list[0]
3 changes: 1 addition & 2 deletions punchpipe/controlsegment/db.py
@@ -33,8 +33,7 @@ def filename(self) -> str:
str
properly formatted PUNCH filename
"""
- # TODO: include version number
- return f'PUNCH_L{self.level}_{self.file_type}{self.observatory}_{self.date_obs.strftime("%Y%m%d%H%M%S")}.fits'
+ return f'PUNCH_L{self.level}_{self.file_type}{self.observatory}_{self.date_obs.strftime("%Y%m%d%H%M%S")}_v{self.file_version}.fits'

def directory(self, root: str):
"""Constructs the directory the file should be stored in
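
Alongside the NDCube swap, File.filename() now embeds the file version. A quick sketch of what the updated format string yields for hypothetical column values (the assignments below are illustrative, not taken from the diff):

    from datetime import datetime

    # hypothetical values for the columns filename() reads
    level, file_type, observatory, file_version = 1, "PM", "1", "1"
    date_obs = datetime(2023, 1, 1, 0, 0, 1)

    # same f-string as the updated File.filename(); note the new "_v{file_version}" suffix
    name = f'PUNCH_L{level}_{file_type}{observatory}_{date_obs.strftime("%Y%m%d%H%M%S")}_v{file_version}.fits'
    print(name)  # PUNCH_L1_PM1_20230101000001_v1.fits
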
69 changes: 69 additions & 0 deletions punchpipe/controlsegment/tests/__init__.py
@@ -0,0 +1,69 @@
from datetime import datetime

import numpy as np
import pytest
from astropy.nddata import StdDevUncertainty
from astropy.wcs import WCS
from punchbowl.data import NormalizedMetadata
from ndcube import NDCube

from punchpipe.controlsegment.db import File
from punchpipe.controlsegment.util import match_data_with_file_db_entry


@pytest.fixture()
def sample_punchdata(shape=(50, 50), level=0):
data = np.random.random(shape)
uncertainty = StdDevUncertainty(np.sqrt(np.abs(data)))
wcs = WCS(naxis=2)
wcs.wcs.ctype = "HPLN-ARC", "HPLT-ARC"
wcs.wcs.cunit = "deg", "deg"
wcs.wcs.cdelt = 0.1, 0.1
wcs.wcs.crpix = 0, 0
wcs.wcs.crval = 1, 1
wcs.wcs.cname = "HPC lon", "HPC lat"

meta = NormalizedMetadata({"LEVEL": level})
return NDCube(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)


def test_match_data_with_file_db_entry_fails_on_empty_list(sample_punchdata):
file_db_entry_list = []
with pytest.raises(RuntimeError):
match_data_with_file_db_entry(sample_punchdata, file_db_entry_list)


def test_match_data_with_file_db_entry(sample_punchdata):
file_db_entry_list = [
File(
level=1,
file_type="XX",
observatory="Y",
file_version="0",
software_version="0",
date_created=datetime.now(),
date_obs=datetime.now(),
date_beg=datetime.now(),
date_end=datetime.now(),
polarization="ZZ",
state="created",
processing_flow=0,
),
File(
level=100,
file_type="XX",
observatory="Y",
file_version="0",
software_version="0",
date_created=datetime.now(),
date_obs=datetime.now(),
date_beg=datetime.now(),
date_end=datetime.now(),
polarization="ZZ",
state="created",
processing_flow=0,
),
]
output = match_data_with_file_db_entry(sample_punchdata, file_db_entry_list)
assert len(output) == 1
assert output == file_db_entry_list[0]
9 changes: 5 additions & 4 deletions punchpipe/controlsegment/tests/test_processor.py
@@ -9,7 +9,8 @@
from astropy.wcs import WCS
from prefect import flow
from prefect.testing.utilities import prefect_test_harness
- from punchbowl.data import NormalizedMetadata, PUNCHData
+ from punchbowl.data import NormalizedMetadata
+ from ndcube import NDCube
from pytest_mock_resources import create_mysql_fixture

from punchpipe.controlsegment.db import Base, File, Flow
@@ -24,7 +25,7 @@ def session_fn(session):
file_type='PM',
observatory='1',
state='created',
- file_version='none',
+ file_version='1',
software_version='none',
date_obs=datetime(2023, 1, 1, 0, 0, 1),
processing_flow=0)
@@ -34,7 +35,7 @@ def session_fn(session):
file_type="PM",
observatory='1',
state='planned',
- file_version='none',
+ file_version='1',
software_version='none',
date_obs=datetime(2023, 1, 1, 0, 0, 1),
processing_flow=1)
@@ -124,7 +125,7 @@ def normal_core_flow():

meta = NormalizedMetadata.load_template("PM1", "1")
meta['DATE-OBS'] = str(datetime(2023, 1, 1, 0, 0, 1))
- output = PUNCHData(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)
+ output = NDCube(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)

return [output]

13 changes: 7 additions & 6 deletions punchpipe/controlsegment/util.py
@@ -2,10 +2,11 @@

import yaml
from prefect import task
- from punchbowl.data import PUNCHData
from sqlalchemy.orm import Session
from yaml.loader import FullLoader
from prefect_sqlalchemy.credentials import DatabaseCredentials
+ from ndcube import NDCube
+ from punchbowl.data import write_ndcube_to_fits, get_base_file_name

from punchpipe.controlsegment.db import File

@@ -32,26 +33,26 @@ def load_pipeline_configuration(path: str) -> dict:
return config


- def write_file(data: PUNCHData, corresponding_file_db_entry, pipeline_config) -> None:
+ def write_file(data: NDCube, corresponding_file_db_entry, pipeline_config) -> None:
output_filename = os.path.join(
corresponding_file_db_entry.directory(pipeline_config["root"]), corresponding_file_db_entry.filename()
)
output_dir = os.path.dirname(output_filename)
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
- data.write(output_filename)
+ write_ndcube_to_fits(data, output_filename)
corresponding_file_db_entry.state = "created"


- def match_data_with_file_db_entry(data: PUNCHData, file_db_entry_list):
+ def match_data_with_file_db_entry(data: NDCube, file_db_entry_list):
# figure out which file_db_entry this corresponds to
matching_entries = [
file_db_entry
for file_db_entry in file_db_entry_list
- if file_db_entry.filename() == data.filename_base + ".fits"
+ if file_db_entry.filename() == get_base_file_name(data) + ".fits"
]
if len(matching_entries) == 0:
raise RuntimeError(f"There did not exist a file_db_entry for this result: result={data.filename_base}.")
raise RuntimeError(f"There did not exist a file_db_entry for this result: result={get_base_file_name(data)}.")
elif len(matching_entries) > 1:
raise RuntimeError("There were many database entries matching this result. There should only be one.")
else:
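
Downstream of these util changes, saving a flow's result looks roughly like the sketch below. This is an illustration, not code from the PR: save_flow_output is a hypothetical helper name, cube is assumed to be an NDCube produced by a punchbowl flow, and pipeline_config a dict from load_pipeline_configuration.

    from punchpipe.controlsegment.util import match_data_with_file_db_entry, write_file

    def save_flow_output(cube, file_db_entry_list, pipeline_config):
        # find the single File row whose filename() matches the cube's punchbowl base name
        entry = match_data_with_file_db_entry(cube, file_db_entry_list)
        # write the cube to FITS under the configured root and mark the row as "created"
        write_file(cube, entry, pipeline_config)
        return entry
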
11 changes: 5 additions & 6 deletions punchpipe/level0/decode_sqrt.py
@@ -3,8 +3,8 @@

import numpy as np
from prefect import get_run_logger, task
+ from ndcube import NDCube

- from punchbowl.data import PUNCHData

TABLE_PATH = os.path.dirname(__file__) + "/decoding_tables/"

@@ -349,19 +349,19 @@ def decode_sqrt_by_table(data: Union[np.ndarray, float], table: np.ndarray) -> n


@task
- def decode_sqrt_data(data_object: PUNCHData, overwrite_table: bool = False) -> PUNCHData:
+ def decode_sqrt_data(data_object: NDCube, overwrite_table: bool = False) -> NDCube:
"""Prefect task in the pipeline to decode square root encoded data
Parameters
----------
- data_object : PUNCHData
+ data_object : NDCube
the object you wish to decode
overwrite_table
Toggle to regenerate and overwrite existing decoding table
Returns
-------
- PUNCHData
+ NDCube
a modified version of the input with the data square root decoded
"""

@@ -386,8 +386,7 @@ def decode_sqrt_data(data_object: PUNCHData, overwrite_table: bool = False) -> P
ccd_read_noise=ccd_read_noise,
overwrite_table=overwrite_table,
)

- data_object = data_object.duplicate_with_updates(data=decoded_data)
+ data_object.data[...] = decoded_data[...]

logger.info("square root decoding finished")
data_object.meta.history.add_now("LEVEL0-decode-sqrt", "image square root decoded")
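
NDCube has no duplicate_with_updates, so decode_sqrt_data now writes the decoded values back into the cube it was given. A standalone sketch of that in-place pattern follows (toy array and bare WCS, not pipeline data; it assumes the cube's data buffer is a writable NumPy array, as it is when constructed this way):

    import numpy as np
    from astropy.wcs import WCS
    from ndcube import NDCube

    cube = NDCube(data=np.zeros((4, 4)), wcs=WCS(naxis=2))
    decoded = np.ones((4, 4))

    # overwrite the existing data buffer instead of building a new object
    cube.data[...] = decoded[...]
    assert float(cube.data.sum()) == 16.0
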
7 changes: 4 additions & 3 deletions punchpipe/level0/tests/test_decode_sqrt.py
@@ -6,8 +6,9 @@
from astropy.wcs import WCS
from prefect.logging import disable_run_logger
from pytest import fixture
+ from ndcube import NDCube

- from punchbowl.data import NormalizedMetadata, PUNCHData
+ from punchbowl.data import NormalizedMetadata
from punchpipe.level0.decode_sqrt import decode_sqrt, decode_sqrt_data, decode_sqrt_simple, encode_sqrt


@@ -30,7 +31,7 @@ def sample_punchdata():
meta = NormalizedMetadata.load_template("PM1", "0")
meta['DATE-OBS'] = str(datetime(2023, 1, 1, 0, 0, 1))

- punchdata_obj = PUNCHData(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)
+ punchdata_obj = NDCube(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)

punchdata_obj.meta['RAWBITS'] = 16
punchdata_obj.meta['COMPBITS'] = 10
@@ -92,5 +93,5 @@ def test_decode_sqrt_data_task(sample_punchdata):

with disable_run_logger():
output_punchdata = decode_sqrt_data.fn(sample_punchdata, overwrite_table=True)
- assert isinstance(output_punchdata, PUNCHData)
+ assert isinstance(output_punchdata, NDCube)
assert output_punchdata.data.shape == (2048, 2048)
8 changes: 5 additions & 3 deletions pyproject.toml
@@ -24,9 +24,10 @@ dependencies = [
"pyyaml",
"click",
"waitress",
"prefect-sqlalchemy"
"prefect-sqlalchemy",
"pylibjpeg[libjpeg]"
]
requires-python = ">=3.10"
requires-python = ">=3.11"
authors = [
{name = "J. Marcus Hughes", email = "[email protected]"},
]
@@ -57,6 +58,7 @@ docs = ["sphinx",
"sphinx-favicon",
"sphinxcontrib-mermaid",
"sphinx-automodapi"]
dev = ["punchpipe[test,docs]", "pre-commit"]

[project.scripts]
punchpipe = "punchpipe.cli:run"
@@ -75,7 +77,7 @@ packages = ["punchpipe"]
skip = "*.fts,*.fits,venv,*.pro,*.asdf,*.ipynb"

[tool.ruff]
- target-version = 'py310'
+ target-version = 'py311'
exclude = ['tests', 'scripts', 'docs']
line-length=120

5 changes: 3 additions & 2 deletions scripts/test_create_ready_level0.py
@@ -6,7 +6,8 @@
from astropy.nddata import StdDevUncertainty
from astropy.wcs import WCS
from prefect import flow, task
- from punchbowl.data import NormalizedMetadata, PUNCHData
+ from punchbowl.data import NormalizedMetadata
+ from ndcube import NDCube
from sqlalchemy.orm import Session
from prefect_sqlalchemy.credentials import DatabaseCredentials

@@ -70,7 +71,7 @@ def generate_fake_level0_data(date_obs):
wcs.wcs.cname = "HPC lon", "HPC lat"

meta = NormalizedMetadata.load_template("PM1", "0")
- return PUNCHData(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)
+ return NDCube(data=data, uncertainty=uncertainty, wcs=wcs, meta=meta)


@flow
Expand Down
