Skip to content

Commit

Permalink
chore: Remove aiofiles dependency
Browse files Browse the repository at this point in the history
We only write very small files. Unless the artifacts dir is located on,
e.g., a very slow network drive, it is very unlikely that the event loop is
blocked by these writes. Usually only a few bytes are written to
dump files or to separate result files.

Let's use the stdlib instead and get rid of one more dependency.
  • Loading branch information
rumpelsepp committed Dec 17, 2024
1 parent 5d1d222 commit 54f2ba7
Show file tree
Hide file tree
Showing 7 changed files with 24 additions and 72 deletions.
2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ classifiers = [
]
requires-python = ">=3.12,<3.14"
dependencies = [
"aiofiles >=24.1.0,<25.0",
"aiosqlite >=0.18",
"argcomplete >=2,<4",
"boltons>=24.1.0",
Expand Down Expand Up @@ -65,7 +64,6 @@ dev = [
"reuse >=4.0,<5.0",
"ruff >=0.8.0",
"sphinx-rtd-theme >=3",
"types-aiofiles >=23.1,<25.0",
"types-tabulate >=0.9,<0.10",
]

Expand Down
12 changes: 3 additions & 9 deletions src/gallia/command/uds.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import json
from abc import ABC

import aiofiles
from pydantic import field_validator

from gallia.command.base import FileNames, Scanner, ScannerConfig
Expand Down Expand Up @@ -138,9 +137,7 @@ async def setup(self) -> None:

if self.config.properties is True:
path = self.artifacts_dir.joinpath(FileNames.PROPERTIES_PRE.value)
async with aiofiles.open(path, "w") as file:
await file.write(json.dumps(await self.ecu.properties(True), indent=4))
await file.write("\n")
path.write_text(json.dumps(await self.ecu.properties(True), indent=4) + "\n")

if self.db_handler is not None:
self._apply_implicit_logging_setting()
Expand All @@ -156,13 +153,10 @@ async def setup(self) -> None:
async def teardown(self) -> None:
if self.config.properties is True and (not self.ecu.transport.is_closed):
path = self.artifacts_dir.joinpath(FileNames.PROPERTIES_POST.value)
async with aiofiles.open(path, "w") as file:
await file.write(json.dumps(await self.ecu.properties(True), indent=4))
await file.write("\n")
path.write_text(json.dumps(await self.ecu.properties(True), indent=4) + "\n")

path_pre = self.artifacts_dir.joinpath(FileNames.PROPERTIES_PRE.value)
async with aiofiles.open(path_pre) as file:
prop_pre = json.loads(await file.read())
prop_pre = json.loads(path_pre.read_text())

if self.config.compare_properties and await self.ecu.properties(False) != prop_pre:
logger.warning("ecu properties differ, please investigate!")
Expand Down
45 changes: 16 additions & 29 deletions src/gallia/commands/discover/doip.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,6 @@
from itertools import product
from urllib.parse import parse_qs, urlparse

import aiofiles

from gallia.command import AsyncScript
from gallia.command.base import AsyncScriptConfig
from gallia.command.config import AutoInt, Field
Expand Down Expand Up @@ -294,10 +292,11 @@ async def enumerate_routing_activation_requests(
f"doip://{tgt_hostname}:{tgt_port}?protocol_version={self.protocol_version}&activation_type={routing_activation_type:#x}&src_addr={source_address:#x}"
)
logger.notice(f"[🤯] Holy moly, it actually worked: {targets[-1]}")
async with aiofiles.open(
self.artifacts_dir.joinpath("1_valid_routing_activation_requests.txt"), "a"

with self.artifacts_dir.joinpath("1_valid_routing_activation_requests.txt").open(
"a"
) as f:
await f.write(f"{targets[-1]}\n")
f.write(f"{targets[-1]}\n")

if len(targets) > 0:
logger.notice("[⚔️] It's dangerous to test alone, take one of these:")
Expand Down Expand Up @@ -340,10 +339,8 @@ async def enumerate_target_addresses(
# If we reach this, the request was not denied due to unknown TargetAddress or other DoIP errors
known_targets.append(current_target)
logger.notice(f"[🥈] HEUREKA: target address {target_addr:#x} is valid! ")
async with aiofiles.open(
self.artifacts_dir.joinpath("3_valid_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("3_valid_targets.txt").open("a") as f:
f.write(f"{current_target}\n")

# Here is where "reader_task" comes into play, which monitors incoming DiagnosticMessage replies

Expand All @@ -354,28 +351,22 @@ async def enumerate_target_addresses(
elif e.nack_code == DiagnosticMessageNegativeAckCodes.TargetUnreachable:
logger.info(f"[💤] {target_addr:#x} is (currently?) unreachable")
unreachable_targets.append(current_target)
async with aiofiles.open(
self.artifacts_dir.joinpath("5_unreachable_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("5_unreachable_targets.txt").open("a") as f:
f.write(f"{current_target}\n")
continue
else:
logger.warning(
f"[🤷] {target_addr:#x} is behaving strangely: {e.nack_code.name}"
)
async with aiofiles.open(
self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a"
) as f:
await f.write(f"{target_addr:#x}: {e.nack_code.name}\n")
with self.artifacts_dir.joinpath("7_targets_with_errors.txt").open("a") as f:
f.write(f"{target_addr:#x}: {e.nack_code.name}\n")
continue

except ConnectionError as e:
# Whenever this triggers, but sometimes connections are closed not by us
logger.warning(f"[🫦] Sexy, but unexpected: {target_addr:#x} triggered {e!r}")
async with aiofiles.open(
self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a"
) as f:
await f.write(f"{target_addr:#x}: {e}\n")
with self.artifacts_dir.joinpath("7_targets_with_errors.txt").open("a") as f:
f.write(f"{target_addr:#x}: {e}\n")
# Re-establish DoIP connection
await conn.close()
await asyncio.sleep(tcp_connect_delay)
Expand Down Expand Up @@ -426,10 +417,8 @@ async def task_read_diagnostic_messages(

if current_target not in responsive_targets:
responsive_targets.append(current_target)
async with aiofiles.open(
self.artifacts_dir.joinpath("4_responsive_targets.txt"), "a"
) as f:
await f.write(f"{current_target}\n")
with self.artifacts_dir.joinpath("4_responsive_targets.txt").open("a") as f:
f.write(f"{current_target}\n")
if self.db_handler is not None:
await self.db_handler.insert_discovery_result(current_target)

Expand Down Expand Up @@ -521,10 +510,8 @@ async def run_udp_discovery(self) -> list[tuple[str, int]]:
for item in found:
url = f"doip://{item[0]}:{item[1]}"
logger.notice(url)
async with aiofiles.open(
self.artifacts_dir.joinpath("0_valid_hosts.txt"), "a"
) as f:
await f.write(f"{url}\n")
with self.artifacts_dir.joinpath("0_valid_hosts.txt").open("a") as f:
f.write(f"{url}\n")
else:
logger.notice(
"[👸] Your princess is in another castle: no DoIP endpoints here it seems..."
Expand Down
8 changes: 3 additions & 5 deletions src/gallia/commands/scan/uds/sa_dump_seeds.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@
import time
from pathlib import Path

import aiofiles

from gallia.command import UDSScanner
from gallia.command.config import AutoInt, Field, HexBytes
from gallia.command.uds import UDSScannerConfig
Expand Down Expand Up @@ -106,7 +104,7 @@ async def main(self) -> None:

i = -1
seeds_file = Path.joinpath(self.artifacts_dir, "seeds.bin")
file = await aiofiles.open(seeds_file, "wb", buffering=0)
file = seeds_file.open("wb", buffering=0)
duration = self.config.duration * 60
start_time = time.time()
last_seed = b""
Expand Down Expand Up @@ -148,7 +146,7 @@ async def main(self) -> None:

logger.info(f"Received seed of length {len(seed)}")

await file.write(seed)
file.write(seed)
if last_seed == seed:
logger.warning("Received the same seed as before")

Expand Down Expand Up @@ -192,6 +190,6 @@ async def main(self) -> None:
logger.info(f"Sleeping for {self.config.sleep} seconds between seed requests…")
await asyncio.sleep(self.config.sleep)

await file.close()
file.close()
self.log_size(seeds_file, time.time() - start_time)
await self.ecu.leave_session(session, sleep=self.config.power_cycle_sleep)
2 changes: 0 additions & 2 deletions src/gallia/dumpcap.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,8 +116,6 @@ async def stop(self) -> None:
await self.compressor

async def _compressor(self) -> None:
# Gzip support in aiofiles is missing.
# https://github.com/Tinche/aiofiles/issues/46
ready = False
assert self.proc.stdout
with await asyncio.to_thread(gzip.open, self.outfile, "wb") as f:
Expand Down
5 changes: 2 additions & 3 deletions src/gallia/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
from typing import TYPE_CHECKING, Any, TypeVar
from urllib.parse import urlparse

import aiofiles
import pydantic
from pydantic.networks import IPvAnyAddress

Expand Down Expand Up @@ -227,9 +226,9 @@ async def write_target_list(
:params db_handler: if given, urls are also written to the database as discovery results
:return: None
"""
async with aiofiles.open(path, "w") as f:
with path.open("w") as f:
for target in targets:
await f.write(f"{target}\n")
f.write(f"{target}\n")

if db_handler is not None:
await db_handler.insert_discovery_result(str(target))
Expand Down
22 changes: 0 additions & 22 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 54f2ba7

Please sign in to comment.