
Commit

Merge pull request #32 from FocoosAI/feat/add-system-info
Add System Information Retrieval
CuriousDolphin authored Dec 20, 2024
2 parents ed7c875 + c2c9b5c commit 4f222c8
Showing 9 changed files with 407 additions and 25 deletions.
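
The headline change is a new get_system_info() helper that reports host, CPU, memory, disk, GPU, and package details. Since the diff below re-exports it from the package root (focoos/__init__.py), a minimal usage sketch might look like the following; field values naturally depend on the host machine:

# Hedged usage sketch, not part of the diff: calling the new system-info API.
from focoos import get_system_info

info = get_system_info()          # gathers CPU, memory, disk, GPU, and package versions
info.pretty_print()               # prints the formatted report implemented in focoos/ports.py
print(info.available_providers)   # e.g. ['CPUExecutionProvider'] on a CPU-only host
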
6 changes: 3 additions & 3 deletions Makefile
@@ -1,4 +1,4 @@
.PHONY: install install-dev install-pre-commit run-pre-commit
.PHONY: test install install-dev install-pre-commit run-pre-commit

install:
@pip install . --no-cache-dir
@@ -8,8 +8,8 @@ install-dev:
install-pre-commit:
@pre-commit install
lint:
@isort . --profile=black
@black .
@isort ./focoos --profile=black
@black ./focoos
run-pre-commit:
@pre-commit run --all-files
test:
1 change: 1 addition & 0 deletions focoos/__init__.py
@@ -2,3 +2,4 @@
from .local_model import LocalModel
from .ports import *
from .remote_model import RemoteModel
from .utils.system import get_system_info
4 changes: 2 additions & 2 deletions focoos/local_model.py
@@ -110,12 +110,12 @@ def _annotate(self, im: np.ndarray, detections: Detections) -> np.ndarray:
if classes is not None:
labels = [
f"{classes[int(class_id)]}: {confid*100:.0f}%"
for class_id, confid in zip(detections.class_id, detections.confidence)
for class_id, confid in zip(detections.class_id, detections.confidence) # type: ignore
]
else:
labels = [
f"{str(class_id)}: {confid*100:.0f}%"
for class_id, confid in zip(detections.class_id, detections.confidence)
for class_id, confid in zip(detections.class_id, detections.confidence) # type: ignore
]
if self.metadata.task == FocoosTask.DETECTION:
annotated_im = self.box_annotator.annotate(
52 changes: 52 additions & 0 deletions focoos/ports.py
@@ -238,3 +238,55 @@ class RuntimeTypes(str, Enum):
ONNX_TRT16 = "onnx_trt16"
ONNX_CPU = "onnx_cpu"
ONNX_COREML = "onnx_coreml"


class GPUInfo(FocoosBaseModel):
gpu_id: Optional[int] = None
gpu_name: Optional[str] = None
gpu_memory_total_gb: Optional[float] = None
gpu_memory_used_percentage: Optional[float] = None
gpu_temperature: Optional[float] = None
gpu_load_percentage: Optional[float] = None


class SystemInfo(FocoosBaseModel):
focoos_host: Optional[str] = None
system: Optional[str] = None
system_name: Optional[str] = None
cpu_type: Optional[str] = None
cpu_cores: Optional[int] = None
memory_gb: Optional[float] = None
memory_used_percentage: Optional[float] = None
available_providers: Optional[list[str]] = None
disk_space_total_gb: Optional[float] = None
disk_space_used_percentage: Optional[float] = None
gpu_count: Optional[int] = None
gpu_driver: Optional[str] = None
gpu_cuda_version: Optional[str] = None
gpus_info: Optional[list[GPUInfo]] = None
packages_versions: Optional[dict[str, str]] = None

def pretty_print(self):
print("================ SYSTEM INFO ====================")
for key, value in self.model_dump().items():
if isinstance(value, list):
print(f"{key}:")
if key == "gpus_info":  # special formatting for gpus_info
for item in value:
print(f"- id: {item['gpu_id']}")
for sub_key, sub_value in item.items():
if sub_key != "gpu_id" and sub_value is not None:
formatted_key = sub_key.replace("_", "-")
print(f" - {formatted_key}: {sub_value}")
else:
for item in value:
print(f" - {item}")
elif (
isinstance(value, dict) and key == "packages_versions"
):  # special formatting for packages_versions
print(f"{key}:")
for pkg_name, pkg_version in value.items():
print(f" - {pkg_name}: {pkg_version}")
else:
print(f"{key}: {value}")
print("================================================")
11 changes: 9 additions & 2 deletions focoos/runtime.py
@@ -36,6 +36,7 @@
RuntimeTypes,
)
from focoos.utils.logger import get_logger
from focoos.utils.system import get_cpu_name, get_gpu_name

GPU_ID = 0

@@ -266,6 +267,7 @@ def __init__(
self.dtype = dtype
self.binding = binding
self.ort_sess = ort.InferenceSession(model_path, options, providers=providers)
self.active_providers = self.ort_sess.get_providers()
self.logger.info(
f"[onnxruntime] Active providers:{self.ort_sess.get_providers()}"
)
@@ -391,15 +393,20 @@ def benchmark(self, iterations=20, size=640) -> LatencyMetrics:
durations.append((end - start) * 1000)
durations = np.array(durations)
# time.sleep(0.1)
provider = self.active_providers[0]
if provider in ["CUDAExecutionProvider", "TensorrtExecutionProvider"]:
device = get_gpu_name()
else:
device = get_cpu_name()
metrics = LatencyMetrics(
fps=int(1000 / durations.mean()),
engine="onnx",
engine=f"onnx.{provider}",
mean=round(durations.mean(), 3),
max=round(durations.max(), 3),
min=round(durations.min(), 3),
std=round(durations.std(), 3),
im_size=size[0],
device="",
device=str(device),
)
self.logger.info(f"🔥 FPS: {metrics.fps}")
return metrics
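
The benchmark now labels the engine with the first active ONNX Runtime provider and resolves the device name from it. A minimal sketch of that mapping, using the helpers imported above (the function name resolve_benchmark_device is illustrative, not part of the diff):

# Minimal sketch of the provider-to-device mapping; resolve_benchmark_device is a hypothetical name.
from focoos.utils.system import get_cpu_name, get_gpu_name

def resolve_benchmark_device(active_providers: list[str]) -> str:
    provider = active_providers[0]
    if provider in ["CUDAExecutionProvider", "TensorrtExecutionProvider"]:
        return str(get_gpu_name())  # GPU name when a CUDA/TensorRT provider is active
    return str(get_cpu_name())      # otherwise fall back to the CPU name

# e.g. resolve_benchmark_device(["CPUExecutionProvider"]) returns the platform.processor() string
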
216 changes: 215 additions & 1 deletion focoos/utils/system.py
@@ -1,16 +1,42 @@
import importlib.metadata as metadata
import platform
import subprocess
from typing import Optional

import GPUtil
import onnxruntime as ort
import psutil
import requests

from focoos.config import FOCOOS_CONFIG
from focoos.ports import GPUInfo, SystemInfo


class HttpClient:
"""
A simple HTTP client for making GET, POST, and DELETE requests.
This client is initialized with an API key and a host URL, and it
automatically includes the API key in the headers of each request.
Attributes:
api_key (str): The API key for authorization.
host_url (str): The base URL for the API.
default_headers (dict): Default headers including authorization and user agent.
"""

def __init__(
self,
api_key: str,
host_url: str,
):
"""
Initialize the HttpClient with an API key and host URL.
Args:
api_key (str): The API key for authorization.
host_url (str): The base URL for the API.
"""
self.api_key = api_key
self.host_url = host_url

@@ -19,7 +45,22 @@ def __init__(
"user_agent": "focoos/0.0.1",
}

def get_external_url(self, path: str, params: dict = None, stream: bool = False):
def get_external_url(
self, path: str, params: Optional[dict] = None, stream: bool = False
):
"""
Perform a GET request to an external URL.
Args:
path (str): The URL path to request.
params (Optional[dict], optional): Query parameters for the request. Defaults to None.
stream (bool, optional): Whether to stream the response. Defaults to False.
Returns:
Response: The response object from the requests library.
"""
if params is None:
params = {}
return requests.get(path, params=params, stream=stream)

def get(
@@ -29,6 +70,18 @@ def get(
extra_headers: Optional[dict] = None,
stream: bool = False,
):
"""
Perform a GET request to the specified path on the host URL.
Args:
path (str): The URL path to request.
params (Optional[dict], optional): Query parameters for the request. Defaults to None.
extra_headers (Optional[dict], optional): Additional headers to include in the request. Defaults to None.
stream (bool, optional): Whether to stream the response. Defaults to False.
Returns:
Response: The response object from the requests library.
"""
url = f"{self.host_url}/{path}"
headers = self.default_headers
if extra_headers:
@@ -42,15 +95,176 @@ def post(
extra_headers: Optional[dict] = None,
files=None,
):
"""
Perform a POST request to the specified path on the host URL.
Args:
path (str): The URL path to request.
data (Optional[dict], optional): The JSON data to send in the request body. Defaults to None.
extra_headers (Optional[dict], optional): Additional headers to include in the request. Defaults to None.
files (optional): Files to send in the request. Defaults to None.
Returns:
Response: The response object from the requests library.
"""
url = f"{self.host_url}/{path}"
headers = self.default_headers
if extra_headers:
headers.update(extra_headers)
return requests.post(url, headers=headers, json=data, files=files)

def delete(self, path: str, extra_headers: Optional[dict] = None):
"""
Perform a DELETE request to the specified path on the host URL.
Args:
path (str): The URL path to request.
extra_headers (Optional[dict], optional): Additional headers to include in the request. Defaults to None.
Returns:
Response: The response object from the requests library.
"""
url = f"{self.host_url}/{path}"
headers = self.default_headers
if extra_headers:
headers.update(extra_headers)
return requests.delete(url, headers=headers)


def get_cuda_version() -> Optional[str]:
"""
Retrieve the CUDA version installed on the system.
This function runs the `nvidia-smi` command to fetch the CUDA version.
If the command executes successfully and the CUDA version is found in the output,
it returns the version as a string. If the command fails or the CUDA version is not found,
it returns None.
Returns:
Optional[str]: The CUDA version if available, otherwise None.
"""
try:
result = subprocess.run(
["nvidia-smi"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)

if result.returncode == 0:
output = result.stdout
for line in output.splitlines():
if "CUDA Version" in line:
cuda_version = line.split(":")[-1].strip()
cuda_version = cuda_version.split()[0]
return cuda_version
return None
else:
return None
except FileNotFoundError:
return None


def get_gpu_name() -> Optional[str]:
"""
Retrieve the name of the first available GPU.
This function uses the GPUtil library to get the name of the first GPU detected.
If no GPUs are available, it returns None.
Returns:
Optional[str]: The name of the first GPU if available, otherwise None.
"""
try:
return GPUtil.getGPUs()[0].name
except IndexError:
return None


def get_cpu_name() -> Optional[str]:
"""
Retrieve the name of the CPU.
This function uses the platform module to get the name of the CPU.
If the name cannot be determined, an empty string may be returned.
Returns:
Optional[str]: The name of the CPU, or an empty string if it cannot be determined.
"""
return platform.processor()


def get_system_info() -> SystemInfo:
"""
Gather and return comprehensive system information.
This function collects various system metrics including CPU, memory, disk,
and GPU details, as well as installed package versions. It returns this
information encapsulated in a SystemInfo object.
Returns:
SystemInfo: An object containing detailed information about the system's
hardware and software configuration, including:
- System and node name
- CPU type and core count
- Available ONNXRuntime providers
- Memory and disk usage statistics
- GPU count, driver, and CUDA version
- Detailed GPU information if available
- Versions of key installed packages
"""
system_info = platform.uname()
memory_info = psutil.virtual_memory()
disk_info = psutil.disk_usage("/")
gpu_info = GPUtil.getGPUs()
if len(gpu_info) == 0:
gpu_count = 0
gpu_driver = None
gpus_info = None
else:
gpu_count = len(gpu_info)
gpu_driver = gpu_info[0].driver
gpus_info = []
for i, gpu in enumerate(gpu_info):
gpus_info.append(
GPUInfo(
gpu_id=i,
gpu_name=gpu.name,
gpu_memory_total_gb=round(gpu.memoryTotal / 1024, 3),
gpu_memory_used_percentage=round(gpu.memoryUsed / gpu.memoryTotal * 100, 3),  # percentage of GPU memory in use
gpu_temperature=gpu.temperature,
gpu_load_percentage=gpu.load * 100,
)
)
packages = [
"focoos",
"tensorrt",
"onnxruntime",
"onnxruntime-gpu",
"numpy",
"opencv-python",
"pillow",
"supervision",
"pydantic",
]
versions = {}
for package in packages:
try:
versions[package] = metadata.version(package)
except metadata.PackageNotFoundError:
versions[package] = "unknown"

return SystemInfo(
focoos_host=FOCOOS_CONFIG.default_host_url,
system=system_info.system,
system_name=system_info.node,
cpu_type=system_info.machine,
cpu_cores=psutil.cpu_count(logical=True),
available_providers=ort.get_available_providers(),
memory_gb=round(memory_info.total / (1024**3), 3),
memory_used_percentage=round(memory_info.percent, 3),
disk_space_total_gb=round(disk_info.total / (1024**3), 3),
disk_space_used_percentage=round(disk_info.percent, 3),
gpu_count=gpu_count,
gpu_driver=gpu_driver,
gpu_cuda_version=get_cuda_version(),
gpus_info=gpus_info,
packages_versions=versions,
)
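
As a rough sanity check, the standalone helpers degrade gracefully on machines without NVIDIA tooling; a short hedged sketch (outputs are illustrative):

# Illustrative: expected behaviour of the helpers on a host without NVIDIA tooling.
from focoos.utils.system import get_cpu_name, get_cuda_version, get_gpu_name

print(get_cpu_name())      # e.g. "x86_64" or a vendor string from platform.processor()
print(get_gpu_name())      # None when GPUtil detects no GPUs (IndexError is caught)
print(get_cuda_version())  # None when nvidia-smi is missing or prints no "CUDA Version" line
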