Standardize Logging
Hemant-Shashikant-Yadav committed Nov 15, 2024
1 parent d05949a commit 2497fa6
Showing 2 changed files with 70 additions and 6 deletions.
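
For context, and not part of the commit itself: both files now route their messages through module-level loggers created with logging.getLogger(__name__), and configuration.py additionally calls logging.basicConfig(level=logging.DEBUG) at import time. A minimal sketch of how an application using the library might tune that output, assuming the modules ship inside an effortless package so their loggers share the effortless parent:

import logging

# Root handler and threshold for the application itself.
logging.basicConfig(level=logging.INFO)

# Raise the threshold for the library's loggers; any effortless module that has
# not set its own level (see the debug toggle below) inherits this setting.
logging.getLogger("effortless").setLevel(logging.WARNING)
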
13 changes: 13 additions & 0 deletions effortless/configuration.py
@@ -1,6 +1,10 @@
import logging
from typing import Any, Dict, List, Optional
import os

# Configure logging
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)

class EffortlessConfig:
"""
@@ -55,6 +59,11 @@ def debug(self) -> bool:
@debug.setter
def debug(self, value: bool) -> None:
self._debug = bool(value)

if self._debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)

@property
def required_fields(self) -> List[str]:
@@ -201,6 +210,10 @@ def validate_db(self, db) -> None:
Raises:
ValueError: If the database size exceeds max_size or if any entry is missing required fields.
"""

if self.debug:
logger.debug(f"Validating database with max_size={self.max_size} and required_fields={self.required_fields}.")

# Check max_size
if self.max_size is not None:
current_size = os.path.getsize(db._storage_file) / (
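Before moving on to effortless.py, a hedged illustration of the debug toggle added in the hunk above; it assumes EffortlessConfig can be constructed with defaults and imported as effortless.configuration, neither of which is shown in this diff:

from effortless.configuration import EffortlessConfig, logger
import logging

config = EffortlessConfig()
config.debug = True
assert logger.level == logging.DEBUG   # the setter raised the module logger to DEBUG
config.debug = False
assert logger.level == logging.INFO    # and lowered it back to INFO
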
63 changes: 57 additions & 6 deletions effortless/effortless.py
@@ -35,6 +35,8 @@ def __init__(self, db_name: str = "db", encryption_key: Optional[str] = None):
self._autoconfigure()
self._operation_count = 0
self._backup_thread = None

logger.debug(f"EffortlessDB initialized with db_name={db_name}, encryption_key={encryption_key}")

@property
def config(self):
@@ -98,9 +100,11 @@ def encrypt(self, new_key: str) -> None:
self._reencrypt_db(old_key, new_key)
except ValueError as e:
self._encryption_key = old_key # Revert to old key
logger.error(f"Failed to re-encrypt the database: {e}")
raise e
else:
self.config.encrypted = True
logger.info(f"Encryption key set for new database encryption.")

def _reencrypt_db(self, old_key: Optional[str], new_key: str) -> None:
"""
@@ -115,8 +119,13 @@ def _reencrypt_db(self, old_key: Optional[str], new_key: str) -> None:
Raises:
ValueError: If unable to decrypt the database with either the old or new key.
"""
-        data = self._read_db(try_keys=[old_key, new_key])
-        self._write_db(data, force_encrypt=True)
+        try:
+            data = self._read_db(try_keys=[old_key, new_key])
+            self._write_db(data, force_encrypt=True)
+            logger.info("Database successfully re-encrypted with new key.")
+        except ValueError as e:
+            logger.error(f"Failed to re-encrypt the database: {e}")
+            raise

@staticmethod
def default_db():
@@ -156,6 +165,8 @@ def set_directory(self, directory: str) -> None:

self._storage_directory = directory
self._update_storage_file()
logger.info(f"Database directory set to {directory}")


def set_storage(self, db_name: str) -> None:
"""
@@ -186,6 +197,8 @@ def set_storage(self, db_name: str) -> None:

self._storage_filename = f"{db_name}.effortless"
self._update_storage_file()
logger.info(f"Storage file set to {self._storage_filename}")


def _update_storage_file(self) -> None:
"""
@@ -206,6 +219,7 @@ def _update_storage_file(self) -> None:
self._storage_file = self._storage_filename

self._autoconfigure() # configure EffortlessConfig to the new file's configuration
logger.debug(f"Storage file path updated to {self._storage_file}")

def _autoconfigure(self) -> None:
"""
@@ -238,6 +252,7 @@ def _migrate(self):
config.version = EffortlessConfig.CURRENT_VERSION
new_data = {"headers": config.to_dict(), "content": content}
self._write_db(new_data)
logger.info("Database migration completed to current version.")

def _update_config(self):
"""
@@ -248,6 +263,7 @@ def _update_config(self):
new_config = EffortlessConfig.from_dict(data["headers"])
new_config.validate_db(self)
self._config = new_config
logger.debug("Configuration updated successfully.")
except (ValueError, KeyError) as e:
logger.error(f"Invalid configuration in database file: {e}")
# Optionally, you could reset to a default configuration here
@@ -265,6 +281,7 @@ def configure(self, new_config: EffortlessConfig) -> None:
data["headers"] = new_config.to_dict()
self._write_db(data, write_in_readonly=True)
self._config = new_config
logger.info(f"Database configuration updated successfully.")

def get_all(self) -> List[Dict[str, Any]]:
"""
@@ -276,6 +293,7 @@ def get_all(self) -> List[Dict[str, Any]]:
Returns:
List[Dict[str, Any]]: A list where each entry is a record in the database.
"""
logger.debug("Fetching all records from the database.")
return self._read_db()["content"]

def filter(self, query: Query) -> List[Dict[str, Any]]:
@@ -291,6 +309,7 @@ def filter(self, query: Query) -> List[Dict[str, Any]]:
Returns:
List[Dict[str, Any]]: A list of records that match the query criteria.
"""
logger.debug(f"Filtering records based on query: {query}")
return [entry for entry in self.get_all() if query.match(entry)]

def add(self, entry: dict) -> None:
@@ -343,6 +362,7 @@ def add(self, entry: dict) -> None:
data["content"].append(entry)
self._write_db(data)
self._handle_backup()
logger.debug(f"Entry added to database: {entry}")

def wipe(self, wipe_readonly: bool = False) -> None:
"""
@@ -362,6 +382,7 @@ def wipe(self, wipe_readonly: bool = False) -> None:
write_in_readonly=wipe_readonly,
)
self._update_config()
logger.info("Database wiped successfully.")

def _read_db(
self, try_keys: Optional[List[Optional[str]]] = None
@@ -497,6 +518,7 @@ def _handle_backup(self) -> None:
and self._operation_count >= self.config.backup_interval
):
self._operation_count = 0
logger.debug(f"Backup interval reached. Starting backup...")

# If a backup thread is already running, we can stop it
if self._backup_thread and self._backup_thread.is_alive():
@@ -523,8 +545,14 @@ def finish_backup(self, timeout: Optional[float] = None) -> bool:
False if the timeout was reached before the backup completed.
"""
if self._backup_thread and self._backup_thread.is_alive():
logger.debug(f"Waiting for backup thread to finish...")
self._backup_thread.join(timeout)
if not self._backup_thread.is_alive():
logger.debug("Backup completed.")
else:
logger.warning(f"Backup thread did not complete within timeout of {timeout}s.")
return not self._backup_thread.is_alive()
logger.debug("No backup thread is running.")
return True

def _backup(self) -> bool:
@@ -556,6 +584,7 @@ def _backup(self) -> bool:
logger.error(f"Backup failed: {str(e)}")
return False # Indicate failure

logger.warning("Backup path not configured, skipping backup.")
return False

def _compress_data(self, data: Union[str, List[Dict[str, Any]]]) -> str:
@@ -571,6 +600,8 @@ def _compress_data(self, data: Union[str, List[Dict[str, Any]]]) -> str:
if isinstance(data, list):
data = json.dumps(data)
compressed = zlib.compress(data.encode())
encoded_data = base64.b64encode(compressed).decode()
logger.debug(f"Data compressed to {len(encoded_data)} characters.")
return base64.b64encode(compressed).decode()

def _decompress_data(self, data: str) -> List[Dict[str, Any]]:
@@ -586,7 +617,9 @@ def _decompress_data(self, data: str) -> List[Dict[str, Any]]:
List[Dict[str, Any]]: The decompressed and parsed data.
"""
decompressed = zlib.decompress(base64.b64decode(data))
-        return json.loads(decompressed.decode())
+        decoded_data = json.loads(decompressed.decode())
+        logger.debug(f"Data decompressed and parsed. {len(decoded_data)} entries found.")
+        return decoded_data

def _encrypt_data(self, data: Union[str, Dict[str, Any]], key: str) -> str:
"""
@@ -600,7 +633,9 @@ def _encrypt_data(self, data: Union[str, Dict[str, Any]], key: str) -> str:
str: The encrypted data as a string.
"""
fernet = Fernet(self._get_fernet_key(key))
-        return fernet.encrypt(json.dumps(data).encode()).decode()
+        encrypted_data = fernet.encrypt(json.dumps(data).encode()).decode()
+        logger.debug("Data encrypted successfully.")
+        return encrypted_data

def _decrypt_data(self, data: str, key: str) -> Dict[str, Any]:
"""
@@ -617,7 +652,13 @@ def _decrypt_data(self, data: str, key: str) -> Dict[str, Any]:
InvalidToken: If the decryption fails due to an invalid key.
"""
fernet = Fernet(self._get_fernet_key(key))
-        return json.loads(fernet.decrypt(data.encode()).decode())
+        try:
+            decrypted_data = fernet.decrypt(data.encode()).decode()
+            logger.debug("Data decrypted successfully.")
+            return json.loads(decrypted_data)
+        except InvalidToken:
+            logger.error("Decryption failed: Invalid encryption key.")
+            raise

@staticmethod
def _get_fernet_key(key: str) -> bytes:
@@ -662,6 +703,7 @@ def update(self, update_data: Dict[str, Any], condition: Query) -> bool:
data["content"][index].update(update_data)
self._write_db(data)
self._handle_backup()
logger.debug(f"Entry updated with data: {update_data}")
return True

def batch(self, update_data: Dict[str, Any], condition: Query) -> int:
@@ -686,6 +728,9 @@ def batch(self, update_data: Dict[str, Any], condition: Query) -> int:
if updated_count > 0:
self._write_db(data)
self._handle_backup()
logger.debug(f"{updated_count} entries updated.")
else:
logger.debug("No entries updated.")

return updated_count

@@ -708,15 +753,18 @@ def remove(self, condition: Query) -> bool:
]

if len(matching_entries) > 1:
logger.error("More than one entry matches the given condition.")
raise ValueError(
"More than one entry matches the given condition. If you want to remove multiple entries at once, use erase() instead."
)
elif len(matching_entries) == 0:
logger.debug("No matching entry found to remove.")
return False

data["content"].remove(matching_entries[0])
self._write_db(data)
self._handle_backup()
logger.debug("Entry removed from the database.")
return True

def erase(self, condition: Query) -> int:
@@ -739,7 +787,9 @@ def erase(self, condition: Query) -> int:
if removed_count > 0:
self._write_db(data)
self._handle_backup()
-            logger.debug(f"Erased {removed_count} entries from the database")
+            logger.debug(f"Erased {removed_count} entries from the database.")
+        else:
+            logger.debug("No entries matching the condition to erase.")

return removed_count

@@ -781,6 +831,7 @@ def unencrypt(self) -> None:
data = self._read_db()
if not data["headers"]["encrypted"]:
self._encryption_key = None
logger.debug("Database is not encrypted, nothing to unencrypt.")
return
if self._encryption_key is None:
raise ValueError("No encryption key set")
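
To tie the new effortless.py log lines together, a short usage sketch; the import path, entry data, and timeout are illustrative rather than taken from the commit:

from effortless import EffortlessDB  # assumed public import path

db = EffortlessDB("db")              # logs "EffortlessDB initialized with db_name=db, encryption_key=None"
db.add({"name": "Ada"})              # logs "Entry added to database: ..." and may trigger a background backup
if not db.finish_backup(timeout=5.0):
    # finish_backup logged "Backup thread did not complete within timeout of 5.0s."
    pass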
