Skip to content

Commit

Permalink
Rename things
Browse files — browse the repository at this point in the history
  • Loading branch information
nick-harder committed Nov 25, 2024
1 parent 3cc85e0 commit 1eaf736
Showing 1 changed file with 9 additions and 6 deletions.
15 changes: 9 additions & 6 deletions assume/common/outputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ class WriteOutput(Role):
learning_mode (bool, optional): Indicates if the simulation is in learning mode. Defaults to False.
perform_evaluation (bool, optional): Indicates if the simulation is in evaluation mode. Defaults to False.
additional_kpis (dict[str, OutputDef], optional): makes it possible to define additional kpis evaluated
max_dfs_size_mb (int, optional): The maximum storage size for storing output data before saving it. Defaults to 250 MB.
"""

def __init__(
Expand All @@ -59,6 +60,7 @@ def __init__(
learning_mode: bool = False,
perform_evaluation: bool = False,
additional_kpis: dict[str, OutputDef] = {},
max_dfs_size_mb: int = 250,
):
super().__init__()

Expand Down Expand Up @@ -95,8 +97,9 @@ def __init__(
# construct all timeframe under which hourly values are written to excel and db
self.start = start
self.end = end
self.current_size_bytes = 0
self.max_size_mb = 300

self.max_dfs_size = max_dfs_size_mb * 1024 * 1024
self.current_dfs_size = 0

# initializes dfs for storing and writing asynchronous
self.write_dfs: dict = defaultdict(list)
Expand Down Expand Up @@ -241,9 +244,9 @@ def handle_output_message(self, content: dict, meta: MetaDict):
self.write_flows(content_data)

# # keep track of the memory usage of the data
self.current_size_bytes += self.calculate_content_size(content_data)
# if the current size is larger than self.max_size_mb, store the data
if self.current_size_bytes > self.max_size_mb * 1024 * 1024:
self.current_dfs_size += self.calculate_content_size(content_data)
# if the current size is larger than self.max_dfs_size, store the data
if self.current_dfs_size > self.max_dfs_size:
logger.debug("storing output data due to size limit")
self.context.schedule_instant_task(coroutine=self.store_dfs())

Check warning on line 251 in assume/common/outputs.py

View check run for this annotation

Codecov / codecov/patch

assume/common/outputs.py#L250-L251

Added lines #L250 - L251 were not covered by tests

Expand Down Expand Up @@ -323,7 +326,7 @@ async def store_dfs(self):

self.write_dfs[table] = []

self.current_size_bytes = 0
self.current_dfs_size = 0

def store_grid(
self,
Expand Down

0 comments on commit 1eaf736

Please sign in to comment.