diff --git a/docs/40-having-two-modules-with-the-same-variable-in-an-agent-can-result-in-the-default-variable-callback-being-removed-for-the-first-module/coverage/d_6d86c0898c126a4f_agent_logger_py.html b/docs/40-having-two-modules-with-the-same-variable-in-an-agent-can-result-in-the-default-variable-callback-being-removed-for-the-first-module/coverage/d_6d86c0898c126a4f_agent_logger_py.html deleted file mode 100644 index b8f061c..0000000 --- a/docs/40-having-two-modules-with-the-same-variable-in-an-agent-can-result-in-the-default-variable-callback-being-removed-for-the-first-module/coverage/d_6d86c0898c126a4f_agent_logger_py.html +++ /dev/null @@ -1,285 +0,0 @@ - - -
- « prev - ^ index - » next - - coverage.py v7.4.4, - created at 2024-11-07 12:02 +0000 -
1"""This module contains a custom Module to log
2all variables inside an agent's data_broker."""
3
4import collections
5import json
6import os
7import logging
8from ast import literal_eval
9from typing import Union
10
11from pydantic import field_validator, Field
12import pandas as pd
13
14from agentlib import AgentVariable
15from agentlib.core import BaseModule, Agent, BaseModuleConfig
16
17
18logger = logging.getLogger(__name__)
19
20
class AgentLoggerConfig(BaseModuleConfig):
    """Define parameters for the AgentLogger"""

    filename: str = Field(
        title="filename",
        default=None,  # Set later when agent_id is available
        description="The filename where the log is stored.",
    )
    t_sample: Union[float, int] = Field(
        title="t_sample",
        default=300,
        description="The log is saved every other t_sample seconds.",
    )
    values_only: bool = Field(
        title="values_only",
        default=True,
        # Note the trailing space: the adjacent literals are concatenated,
        # without it the text read "allfields".
        description="If True, only the values are logged. Else, all "
        "fields in the AgentVariable are logged.",
    )
    clean_up: bool = Field(
        title="clean_up",
        default=True,
        description="If True, file is deleted once load_log is called.",
    )

    @field_validator("filename")
    @classmethod
    def check_existence_of_file(cls, filename):
        """Warn when the configured log file already exists.

        Args:
            filename: The configured log path, or None when the
                agent-id based default is chosen later in
                AgentLogger.__init__.

        Returns:
            The filename, unchanged.
        """
        # pylint: disable=no-self-argument,no-self-use
        # filename may legitimately be None (the field default);
        # os.path.exists(None) would raise TypeError, so guard first.
        if filename is not None and os.path.exists(filename):
            logger.error(
                "Specified filename already exists. "
                "The AgentLogger will append to the file."
            )
        return filename
57
58
class AgentLogger(BaseModule):
    """
    A custom logger for Agents to write variables
    which are updated in data_broker into a file.
    """

    config: AgentLoggerConfig

    def __init__(self, *, config: dict, agent: Agent):
        """Overwrite init to enable a custom default filename
        which uses the agent_id."""
        super().__init__(config=config, agent=agent)
        self._filename = self.config.filename
        if self._filename is None:
            # No filename configured: derive one from the agent id so that
            # multiple agents in one process do not share a log file.
            self._filename = os.path.join(
                os.getcwd(), f"Agent_{self.agent.id}_Logger.log"
            )
        # Buffer of entries not yet written to disk:
        # {str(env.time): {str((alias, str(source))): value-or-dict}}
        self._variables_to_log = {}
        if not self.env.config.rt and self.config.t_sample < 60:
            self.logger.warning(
                "Sampling time of agent_logger %s is very low %s. This can hinder "
                "performance.",
                self.id,
                self.config.t_sample,
            )

    @property
    def filename(self):
        """Return the filename where to log."""
        return self._filename

    def process(self):
        """Write the buffered variables to file once
        every t_sample seconds of environment time."""
        while True:
            self._log()
            yield self.env.timeout(self.config.t_sample)

    def register_callbacks(self):
        """Callbacks trigger the log_cache function"""
        callback = (
            self._callback_values if self.config.values_only else self._callback_full
        )
        # alias=None and source=None subscribe to every variable on the broker.
        self.agent.data_broker.register_callback(
            alias=None, source=None, callback=callback
        )

    def _callback_values(self, variable: AgentVariable):
        """Save variable values to log later.

        Non-scalar values are skipped: they may not be JSON
        serializable and would bloat the log.
        """
        if not isinstance(variable.value, (float, int, str)):
            return
        current_time = self._variables_to_log.setdefault(str(self.env.time), {})
        # we merge alias and source tuple into a string so we can .json it
        current_time[str((variable.alias, str(variable.source)))] = variable.value

    def _callback_full(self, variable: AgentVariable):
        """Save full variable to log later."""
        current_time = self._variables_to_log.setdefault(str(self.env.time), {})
        current_time[str((variable.alias, str(variable.source)))] = variable.dict()

    def _log(self):
        """Writes the currently in memory saved values to file"""
        # Swap the buffer first so callbacks firing during the write go
        # into a fresh dict rather than the one being serialized.
        _variables_to_log = self._variables_to_log
        self._variables_to_log = {}
        # Explicit encoding keeps the JSON-lines file portable across platforms.
        with open(self.filename, "a", encoding="utf-8") as file:
            json.dump(_variables_to_log, file)
            file.write("\n")

    @classmethod
    def load_from_file(
        cls, filename: str, values_only: bool = True, merge_sources: bool = True
    ) -> pd.DataFrame:
        """Loads the log file and consolidates it as a pandas DataFrame.

        Args:
            filename: The file to load
            values_only: If true, loads a file that only has values saved (default True)
            merge_sources: When there are variables with the same alias from multiple
                sources, they are saved in different columns. For backwards
                compatibility, they are merged into a single column. However, if you
                specify False for this parameter, you can view them separately,
                resulting in a multi-indexed return column index

        """
        chunks = []
        with open(filename, "r", encoding="utf-8") as file:
            for data_line in file:
                # Skip blank lines, e.g. left over from an interrupted write.
                if not data_line.strip():
                    continue
                chunks.append(json.loads(data_line))
        full_dict = collections.ChainMap(*chunks)
        df = pd.DataFrame.from_dict(full_dict, orient="index")
        df.index = df.index.astype(float)
        # Columns were serialized as str((alias, str(source))); restore tuples.
        columns = (literal_eval(column) for column in df.columns)
        df.columns = pd.MultiIndex.from_tuples(columns)

        if not values_only:

            def _load_agent_variable(var):
                try:
                    return AgentVariable.validate_data(var)
                except TypeError:
                    pass

            # DataFrame.applymap was renamed to DataFrame.map in pandas 2.1
            # and removed in pandas 3.0 -- support both pandas generations.
            if hasattr(df, "map"):
                df = df.map(_load_agent_variable)
            else:
                df = df.applymap(_load_agent_variable)

        if merge_sources:
            # Drop the source level; for duplicate aliases keep the first source.
            df = df.droplevel(1, axis=1)
            df = df.loc[:, ~df.columns.duplicated(keep="first")]
        return df.sort_index()

    def get_results(self) -> pd.DataFrame:
        """Load the own filename"""
        return self.load_from_file(
            filename=self.filename, values_only=self.config.values_only
        )

    def cleanup_results(self):
        """Deletes the log if wanted."""
        if self.config.clean_up:
            try:
                os.remove(self.filename)
            except OSError:
                self.logger.error(
                    "Could not delete filename %s. Please delete it yourself.",
                    self.filename,
                )

    def terminate(self):
        """Flush remaining buffered entries on shutdown.

        When terminating, we log one last time, since otherwise the data
        since the last log interval would be lost.
        NOTE(review): does not chain to super().terminate() -- confirm
        BaseModule requires no teardown of its own.
        """
        self._log()