Fix bug with collecting realtime data whilst still extracting
vingerha committed Nov 25, 2023
1 parent 39a4e8a commit 614c97a
Showing 3 changed files with 42 additions and 46 deletions.
42 changes: 25 additions & 17 deletions custom_components/gtfs2/coordinator.py
@@ -21,7 +21,7 @@
ATTR_LONGITUDE,
ATTR_RT_UPDATED_AT
)
from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index, create_trip_geojson
from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index, create_trip_geojson, check_extracting
from .gtfs_rt_helper import get_rt_route_statuses, get_next_services

_LOGGER = logging.getLogger(__name__)
@@ -50,9 +50,31 @@ async def _async_update_data(self) -> dict[str, str]:
"""Get the latest data from GTFS and GTFS relatime, depending refresh interval"""
data = self.config_entry.data
options = self.config_entry.options

previous_data = None if self.data is None else self.data.copy()
_LOGGER.debug("Previous data: %s", previous_data)
_LOGGER.debug("Previous data: %s", previous_data)

self._pygtfs = get_gtfs(
self.hass, DEFAULT_PATH, data, False
)
self._data = {
"schedule": self._pygtfs,
"origin": data["origin"].split(": ")[0],
"destination": data["destination"].split(": ")[0],
"offset": options["offset"] if "offset" in options else 0,
"include_tomorrow": data["include_tomorrow"],
"gtfs_dir": DEFAULT_PATH,
"name": data["name"],
"file": data["file"],
"extracting": False,
"next_departure": {}
}

if check_extracting(self):
_LOGGER.warning("Cannot update this sensor as still unpacking: %s", self._data["file"])
previous_data["extracting"] = True
return previous_data


# determine static + rt or only static (depending on refresh schedule)
#1. sensor exists with data but refresh interval not yet reached, use existing data
if previous_data is not None and (datetime.datetime.strptime(previous_data["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.utcnow() + timedelta(seconds=1) :
@@ -67,20 +89,6 @@ async def _async_update_data(self) -> dict[str, str]:
# do nothing awaiting refresh interval and use existing data
self._data = previous_data
else:
self._pygtfs = get_gtfs(
self.hass, DEFAULT_PATH, data, False
)
self._data = {
"schedule": self._pygtfs,
"origin": data["origin"].split(": ")[0],
"destination": data["destination"].split(": ")[0],
"offset": options["offset"] if "offset" in options else 0,
"include_tomorrow": data["include_tomorrow"],
"gtfs_dir": DEFAULT_PATH,
"name": data["name"],
"file": data["file"],
}

check_index = await self.hass.async_add_executor_job(
check_datasource_index, self
)
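The net effect in coordinator.py: get_gtfs and the base _data payload are now built on every update cycle, before the refresh-interval check, so the coordinator can detect an in-progress extraction first and hand back the previous result instead of querying a half-built database. A minimal sketch of that guard, assuming a DataUpdateCoordinator-style update method; the None fallback is an addition for illustration, since the committed code indexes previous_data directly:

    async def _async_update_data(self) -> dict[str, str]:
        # Keep the last good result so it can be served unchanged while
        # the GTFS zip is still being unpacked into the sqlite file.
        previous_data = None if self.data is None else self.data.copy()
        if check_extracting(self):
            stale = previous_data if previous_data is not None else {}
            stale["extracting"] = True  # lets the sensor skip db lookups
            return stale
        # ...normal static + realtime refresh continues here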
25 changes: 14 additions & 11 deletions custom_components/gtfs2/gtfs_helper.py
@@ -20,11 +20,8 @@

def get_next_departure(self):
_LOGGER.debug("Get next departure with data: %s", self._data)
gtfs_dir = self.hass.config.path(self._data["gtfs_dir"])
filename = self._data["file"]
journal = os.path.join(gtfs_dir, filename + ".sqlite-journal")
if os.path.exists(journal) :
_LOGGER.error("Cannot use this datasource as still unpacking: %s", filename)
if check_extracting(self):
_LOGGER.warning("Cannot get next depurtures on this datasource as still unpacking: %s", self._data["file"])
return {}

"""Get next departures from data."""
@@ -335,7 +332,7 @@ def get_gtfs(hass, path, data, update=False):
sqlite = data["file"] + ".sqlite"
journal = os.path.join(gtfs_dir, filename + ".sqlite-journal")
if os.path.exists(journal) and not update :
_LOGGER.warning("Cannot use this datasource as still unpacking %s", filename)
_LOGGER.warning("Cannot use this datasource as still unpacking: %s", filename)
return "extracting"
if update and data["extract_from"] == "url" and os.path.exists(os.path.join(gtfs_dir, file)):
remove_datasource(hass, path, filename)
@@ -430,15 +427,21 @@ def remove_datasource(hass, path, filename):
os.remove(os.path.join(gtfs_dir, filename + ".zip"))
os.remove(os.path.join(gtfs_dir, filename + ".sqlite"))
return "removed"


def check_datasource_index(self):
_LOGGER.debug("Check datasource with data: %s", self._data)

def check_extracting(self):
gtfs_dir = self.hass.config.path(self._data["gtfs_dir"])
filename = self._data["file"]
journal = os.path.join(gtfs_dir, filename + ".sqlite-journal")
if os.path.exists(journal) :
_LOGGER.warning("Cannot check indexes on this datasource as still unpacking: %s", filename)
_LOGGER.debug("check extracting: yes")
return True
return False


def check_datasource_index(self):
_LOGGER.debug("Check datasource with data: %s", self._data)
if check_extracting(self):
_LOGGER.warning("Cannot check indexes on this datasource as still unpacking: %s", self._data["file"])
return
schedule = self._pygtfs
sql_index_1 = f"""
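check_extracting is the helper the other two files now share. While pygtfs imports a GTFS zip into <file>.sqlite, SQLite keeps a <file>.sqlite-journal rollback file beside the database and deletes it when the import transaction commits, so probing for that file is a cheap way to ask whether extraction is still running. A self-contained sketch of the same probe, with hypothetical gtfs_dir/filename parameters in place of the component's self._data:

    import os

    def is_extracting(gtfs_dir: str, filename: str) -> bool:
        # SQLite keeps a rollback journal next to the database for the
        # duration of the import transaction and removes it on commit.
        journal = os.path.join(gtfs_dir, filename + ".sqlite-journal")
        return os.path.exists(journal)

One caveat with this probe: a database opened in WAL mode leaves a -wal file rather than a -journal file, so the check relies on pygtfs using SQLite's default rollback-journal mode.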
21 changes: 3 additions & 18 deletions custom_components/gtfs2/sensor.py
@@ -119,6 +119,7 @@ def _update_attrs(self): # noqa: C901 PLR0911
_LOGGER.debug(f"SENSOR update attr DATA: {self.coordinator.data}")
self._pygtfs = self.coordinator.data["schedule"]
self.origin = self.coordinator.data["origin"].split(": ")[0]
self.extracting = self.coordinator.data["extracting"]
self.destination = self.coordinator.data["destination"].split(": ")[0]
self._include_tomorrow = self.coordinator.data["include_tomorrow"]
self._offset = self.coordinator.data["offset"]
@@ -132,34 +133,18 @@ def _update_attrs(self): # noqa: C901 PLR0911
self._trip = None
self._route = None
self._agency = None
# Fetch valid stop information once
if not self._origin and self._departure:
stops = self._pygtfs.stops_by_id(self.origin)
if not stops:
self._available = False
_LOGGER.warning("Origin stop ID %s not found", self.origin)
return
self._origin = stops[0]

if not self._destination and self._departure:
stops = self._pygtfs.stops_by_id(self.destination)
if not stops:
self._available = False
_LOGGER.warning("Destination stop ID %s not found", self.destination)
return
self._destination = stops[0]

# Fetch trip and route details once, unless updated
if not self._departure:
self._trip = None
else:
trip_id = self._departure["trip_id"]
if not self._trip or self._trip.trip_id != trip_id:
if not self.extracting and (not self._trip or self._trip.trip_id != trip_id):
_LOGGER.debug("Fetching trip details for %s", trip_id)
self._trip = self._pygtfs.trips_by_id(trip_id)[0]

route_id = self._departure["route_id"]
if not self._route or self._route.route_id != route_id:
if not self.extracting and (not self._route or self._route.route_id != route_id):
_LOGGER.debug("Fetching route details for %s", route_id)
self._route = self._pygtfs.routes_by_id(route_id)[0]

Expand Down
