Commit

Updated NAIF ID list
added support for epoch query
dahlend committed Apr 18, 2024
1 parent 1b9897f commit 3d3c15c
Showing 4 changed files with 74 additions and 23 deletions.
10 changes: 9 additions & 1 deletion CHANGELOG.md
@@ -7,6 +7,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Added

- Added support for querying SPK kernels from JPL Horizons for a specific epoch of orbit fit.

### Changed

- Updated NAIF ID list to include new designations for about 20-30 comets.


## [0.2.0] - 2024-3-16

@@ -19,6 +27,6 @@ Initial Release
modeling, tools for computing what minor planets can be seen by an observer.
Along with many helpful interfaces to web tools such as JPL Horizons or IPAC's IRSA.

### Added

[Unreleased]: https://github.com/IPAC-SW/neospy/tree/main
[0.2.0]: https://github.com/IPAC-SW/neospy/releases/tag/v0.2.0
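
The epoch-query support listed in the changelog above is driven from Python roughly as follows. This is a usage sketch, not code from the commit: the `SpiceKernels` class name and import path, the comet designation, and the Julian dates are assumptions; only the `cached_kernel_horizons_download` signature and the `apparition_year` semantics come from the spice.py diff below.

```python
# Usage sketch: assumed class name and dates; signature taken from the diff below.
from neospy.spice import SpiceKernels  # assumed export; the enclosing class is not shown in this diff

# Download the SPK fit for comet 1P/Halley using the orbit solution whose epoch
# is the closest one before 1986. jd_start/jd_end may be Time objects or any
# value the Time constructor accepts (assumed here to include JD floats).
SpiceKernels.cached_kernel_horizons_download(
    "1P",
    jd_start=2446000.5,   # ~late 1984 (Julian date)
    jd_end=2447000.5,     # ~mid 1987 (Julian date)
    apparition_year=1986,
)
```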
36 changes: 27 additions & 9 deletions src/neospy/spice.py
@@ -182,7 +182,12 @@ def cached_kernel_url_download(url, force_download=False):

@staticmethod
def cached_kernel_horizons_download(
name, jd_start, jd_end, exact_name=False, update_cache=False
name,
jd_start,
jd_end,
exact_name=False,
update_cache=False,
apparition_year=None,
):
"""
Download a SPICE kernel from JPL Horizons and save it directly into the Cache.
@@ -208,9 +213,19 @@ def cached_kernel_horizons_download(
update_cache:
If the current state of the cache should be ignored and the file
re-downloaded.
apparition_year:
If the object is a comet, retrieve the orbit fit whose epoch precedes the
specified year. If this is not provided, default to the most recent epoch
of orbit fit. Ex: `apparition_year=1980` will return the closest epoch
before 1980.
"""
from .mpc import unpack_designation

if not isinstance(jd_start, Time):
jd_start = Time(jd_start)
if not isinstance(jd_end, Time):
jd_end = Time(jd_end)

if isinstance(name, str):
try:
name = unpack_designation(name)
@@ -227,26 +242,29 @@ def cached_kernel_horizons_download(
)
if "object" not in name_dat.json():
raise ValueError("Failed to find object: ", str(name_dat.json()))
comets = "c" in name_dat.json()["object"]["kind"].lower()
comet = "c" in name_dat.json()["object"]["kind"].lower()

if comet and apparition_year is None:
apparition_year = jd_end.ymd[0]

spk_id = int(name_dat.json()["object"]["spkid"])

dir_path = os.path.join(cache_path(), "kernels")
filename = os.path.join(dir_path, f"{spk_id}.bsp")

if apparition_year is not None:
filename = os.path.join(dir_path, f"{spk_id}_epoch_{apparition_year}.bsp")
else:
filename = os.path.join(dir_path, f"{spk_id}.bsp")

if os.path.isfile(filename) and not update_cache:
return

if not os.path.isdir(dir_path):
os.makedirs(dir_path)

if not isinstance(jd_start, Time):
jd_start = Time(jd_start)
if not isinstance(jd_end, Time):
jd_end = Time(jd_end)
jd_start = jd_start.strftime("%Y-%m-%d")
jd_end = jd_end.strftime("%Y-%m-%d")

cap = "CAP%3B" if comets else ""
cap = f"CAP<{apparition_year}%3B" if comet else ""
response = requests.get(
f"https://ssd.jpl.nasa.gov/api/horizons.api?COMMAND='DES={spk_id}%3B{cap}'"
f"&EPHEM_TYPE=SPK&START_TIME='{jd_start}'&STOP_TIME='{jd_end}'&CENTER=0",
40 changes: 32 additions & 8 deletions src/neospy_core/src/spice/naif_ids.rs
@@ -698,7 +698,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("185P", 1000349),
("193P", 1000350),
("C/2001 Q4", 1000351),
("P/2001 Q6", 1000352),
("473P", 1000352),
("201P", 1000353),
("371P", 1000354),
("370P", 1000355),
@@ -759,7 +759,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("225P", 1000413),
("402P", 1000415),
("C/2002 U2", 1000416),
("P/2002 T6", 1000417),
("472P", 1000417),
("C/2002 T7", 1000418),
("C/2002 V1", 1000419),
("C/2002 V2", 1000420),
@@ -820,7 +820,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("C/2004 C1", 1000478),
("C/2004 D1", 1000479),
("C/1996 R3", 1000480),
("P/2004 DO29", 1000481),
("475P", 1000481),
("D/1952 B1", 1000482),
("C/1993 Y1", 1000483),
("272P", 1000484),
@@ -3452,7 +3452,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("C/2011 M1", 1003135),
("P/2011 N1", 1003136),
("C/2011 N2", 1003137),
("P/2011 NO1", 1003138),
("479P", 1003138),
("P/2011 P1", 1003139),
("P/2010 JC81", 1003140),
("C/2011 P2", 1003141),
@@ -3541,7 +3541,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("C/2012 Y3", 1003227),
("C/2013 A1", 1003228),
("416P", 1003229),
("P/2012 WA34", 1003230),
("481P", 1003230),
("P/2013 AL76", 1003231),
("C/2013 B2", 1003232),
("C/2012 LP26", 1003233),
@@ -3606,7 +3606,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("405P", 1003292),
("C/2013 Y2", 1003293),
("P/2014 A2", 1003294),
("P/2014 A3", 1003295),
("480P", 1003295),
("C/2014 A4", 1003296),
("C/2014 A5", 1003297),
("C/2014 B1", 1003298),
@@ -3771,7 +3771,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("332P-E", 1003458),
("332P-I", 1003459),
("P/2016 G1", 1003460),
("P/2015 HG16", 1003461),
("476P", 1003461),
("C/2015 WZ", 1003462),
("C/2016 J2", 1003463),
("P/2016 J1-A", 1003464),
@@ -3892,7 +3892,7 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("C/2018 N2", 1003580),
("C/2018 KJ3", 1003581),
("C/2018 O1", 1003582),
("P/2018 P3", 1003583),
("477P", 1003583),
("P/2018 P4", 1003584),
("C/2018 P5", 1003585),
("C/2018 R3", 1003586),
@@ -4263,6 +4263,30 @@ pub const NAIF_IDS: &[(&str, isize)] = &[
("C/2023 V1", 1003955),
("P/2023 V2", 1003956),
("C/2023 V3", 1003957),
("C/2023 V4", 1003958),
("C/2023 V5", 1003959),
("P/2023 V6", 1003960),
("C/2023 RN3", 1003961),
("C/2023 X1", 1003962),
("C/2023 X2", 1003964),
("P/2023 X3", 1003965),
("C/2023 X4", 1003966),
("P/2023 Y1", 1003967),
("P/2023 Y2", 1003968),
("474P", 1003969),
("C/2024 A1", 1003970),
("478P", 1003971),
("C/2024 B1", 1003972),
("C/2024 C1", 1003973),
("C/2024 C2", 1003974),
("C/2024 A2", 1003975),
("C/2024 B2", 1003976),
("C/2024 C3", 1003977),
("C/2024 C4", 1003978),
("C/2023 X7", 1003979),
("482P", 1003980),
("P/2005 XR132", 1003981),
("C/2019 G2", 1003982),
("ceres", 2000001),
("pallas", 2000002),
("vesta", 2000004),
11 changes: 6 additions & 5 deletions src/neospy_core/src/spice/spk.rs
@@ -196,18 +196,19 @@ impl SpkSegmentCollection {

/// Given an SPK filename, load all the segments present inside of it.
/// These segments are added to the SPK singleton in memory.
pub fn load_file(&mut self, filename: &str) -> Result<(), NEOSpyError> {
pub fn load_file(&mut self, filename: &str) -> Result<Daf, NEOSpyError> {
let mut file = std::fs::File::open(filename)?;
let mut buffer = Vec::new();
let _ = file.read_to_end(&mut buffer)?;
let mut buffer = Cursor::new(&buffer);

self.load_segments(&mut buffer)
let daf = self.load_segments(&mut buffer)?;
Ok(daf)
}

/// Given a reference to a buffer, load all the segments present inside of it.
/// These segments are added to the SPK singleton in memory.
pub fn load_segments<T: Read + Seek>(&mut self, mut buffer: T) -> Result<(), NEOSpyError> {
pub fn load_segments<T: Read + Seek>(&mut self, mut buffer: T) -> Result<Daf, NEOSpyError> {
let daf = Daf::try_load_header(&mut buffer)?;
if daf.daf_type != DAFType::Spk {
return Err(NEOSpyError::IOError(
@@ -222,7 +223,7 @@ impl SpkSegmentCollection {
.push(SpkSegment::from_summary(&mut buffer, summary)?);
}

Ok(())
Ok(daf)
}

/// Return all mappings from one object to another.
@@ -302,7 +303,7 @@ impl SpkSegmentCollection {

for preload in PRELOAD_SPKS {
let mut de440 = Cursor::new(preload);
self.load_segments(&mut de440).unwrap();
let _ = self.load_segments(&mut de440).unwrap();
}
self.build_cache();
}
