diff --git a/invokeai/app/api/routers/app_info.py b/invokeai/app/api/routers/app_info.py
index 53249363d4e..75ec6e0623d 100644
--- a/invokeai/app/api/routers/app_info.py
+++ b/invokeai/app/api/routers/app_info.py
@@ -13,6 +13,7 @@
from invokeai.app.api.dependencies import ApiDependencies
from invokeai.app.invocations.upscale import ESRGAN_MODELS
from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus
+from invokeai.app.services.system_stats.system_stats import SystemStats, get_system_stats
from invokeai.backend.image_util.infill_methods.patchmatch import PatchMatch
from invokeai.backend.util.logging import logging
from invokeai.version import __version__
@@ -182,3 +183,13 @@ async def disable_invocation_cache() -> None:
 async def get_invocation_cache_status() -> InvocationCacheStatus:
     """Clears the invocation cache"""
     return ApiDependencies.invoker.services.invocation_cache.get_status()
+
+
+@app_router.get("/system-stats", operation_id="get_system_stats", status_code=200, response_model=SystemStats)
+def get_stats() -> SystemStats:
+    """Fetch system statistics: CPU, RAM, and (when available) GPU usage.
+
+    Declared as a sync (non-async) path operation so FastAPI runs it in its
+    threadpool: get_system_stats() blocks for ~1 second while sampling CPU usage.
+    """
+    return get_system_stats()
diff --git a/invokeai/app/services/system_stats/system_stats.py b/invokeai/app/services/system_stats/system_stats.py
new file mode 100644
index 00000000000..dabcaf3541e
--- /dev/null
+++ b/invokeai/app/services/system_stats/system_stats.py
@@ -0,0 +1,148 @@
+import logging
+import platform
+import subprocess
+from typing import List, Optional
+
+import psutil
+from pydantic import BaseModel
+
+logger = logging.getLogger(__name__)
+
+# Maximum time to wait for an external stats tool before giving up.
+_SUBPROCESS_TIMEOUT_SECONDS = 5
+
+
+class GPUStat(BaseModel):
+    """Statistics for a single GPU device."""
+
+    id: int
+    load: float  # utilization percentage (0-100)
+    memory: float  # memory currently in use, in MiB
+    memory_total: float  # total memory, in MiB
+    temperature: Optional[float] = None  # degrees Celsius; None when unavailable
+
+
+class SystemStats(BaseModel):
+    """Snapshot of host resource usage."""
+
+    cpu_usage: float  # CPU utilization percentage (0-100)
+    ram_usage: float  # RAM in use, in MiB
+    gpu_usage: Optional[List[GPUStat]]  # None when no GPU stats could be collected
+
+
+def get_nvidia_stats() -> Optional[List[GPUStat]]:
+    """Query nvidia-smi for per-GPU load, memory, and temperature.
+
+    Returns None when nvidia-smi is missing, times out, or yields no parseable rows.
+    """
+    try:
+        result = subprocess.run(
+            [
+                "nvidia-smi",
+                "--query-gpu=index,utilization.gpu,memory.used,memory.total,temperature.gpu",
+                "--format=csv,noheader,nounits",
+            ],
+            capture_output=True,
+            text=True,
+            timeout=_SUBPROCESS_TIMEOUT_SECONDS,
+        )
+        gpu_usage: List[GPUStat] = []
+        for line in result.stdout.splitlines():
+            parts = line.split(", ")
+            if len(parts) != 5:
+                continue
+            try:
+                gpu_usage.append(
+                    GPUStat(
+                        id=int(parts[0]),
+                        load=float(parts[1]),
+                        memory=float(parts[2]),
+                        memory_total=float(parts[3]),
+                        temperature=float(parts[4]),
+                    )
+                )
+            except ValueError:
+                # Skip rows with non-numeric fields (e.g. "[N/A]") instead of dropping all GPUs.
+                continue
+        return gpu_usage or None
+    except Exception:
+        logger.exception("Error fetching NVIDIA GPU stats")
+        return None
+
+
+def get_amd_stats() -> Optional[List[GPUStat]]:
+    """Query rocm-smi for per-GPU load, memory, and temperature.
+
+    Returns None when rocm-smi is missing, times out, or yields no parseable rows.
+    """
+    try:
+        result = subprocess.run(
+            ["rocm-smi", "--showuse", "--showtemp"],
+            capture_output=True,
+            text=True,
+            timeout=_SUBPROCESS_TIMEOUT_SECONDS,
+        )
+        gpu_usage: List[GPUStat] = []
+        # NOTE(review): rocm-smi's human-readable output varies by version; this
+        # whitespace-split parse should be validated against real output, or
+        # replaced with machine-readable (JSON) output. Unparseable lines are skipped.
+        for line in result.stdout.splitlines():
+            if "GPU" not in line:
+                continue
+            parts = line.split()
+            if len(parts) < 5:
+                continue
+            try:
+                gpu_usage.append(
+                    GPUStat(
+                        id=int(parts[0]),
+                        load=float(parts[1]),
+                        memory=float(parts[2]),
+                        memory_total=float(parts[3]),
+                        temperature=float(parts[4]),
+                    )
+                )
+            except ValueError:
+                continue
+        return gpu_usage or None
+    except Exception:
+        logger.exception("Error fetching AMD GPU stats")
+        return None
+
+
+def get_mps_stats() -> Optional[List[GPUStat]]:
+    """Fetch GPU stats on macOS (Metal/MPS).
+
+    Not yet implemented: macOS exposes no simple CLI for GPU load/VRAM, and the
+    previous ioreg-based draft returned hard-coded placeholder numbers. Returning
+    None ("no GPU stats available") is preferable to reporting fabricated values.
+    TODO: implement via IOKit/Metal APIs.
+    """
+    return None
+
+
+def get_system_stats() -> SystemStats:
+    """Collect CPU, RAM, and (when available) GPU statistics for the host.
+
+    Note: this call blocks for ~1 second while sampling CPU usage; callers on an
+    event loop should run it in a worker thread.
+    """
+    # interval=1 samples over one second for a meaningful reading; interval=None
+    # would be non-blocking but returns 0.0 on the first call.
+    cpu_usage = psutil.cpu_percent(interval=1)
+    ram_usage = psutil.virtual_memory().used / (1024**2)  # bytes -> MiB
+
+    gpu_usage: Optional[List[GPUStat]] = None
+    system_type = platform.system()
+
+    if system_type in ("Windows", "Linux"):
+        # Try NVIDIA first, then fall back to AMD.
+        gpu_usage = get_nvidia_stats() or get_amd_stats()
+    elif system_type == "Darwin":
+        gpu_usage = get_mps_stats()
+
+    return SystemStats(cpu_usage=cpu_usage, ram_usage=ram_usage, gpu_usage=gpu_usage)
+
+
+if __name__ == "__main__":
+    print(get_system_stats())
diff --git a/invokeai/frontend/web/public/locales/en.json b/invokeai/frontend/web/public/locales/en.json
index 4a01f36b6f5..7a6cbc60e12 100644
--- a/invokeai/frontend/web/public/locales/en.json
+++ b/invokeai/frontend/web/public/locales/en.json
@@ -1682,6 +1682,7 @@
"deletePrompt": "Delete Prompt",
"deleteReferenceImage": "Delete Reference Image",
"showHUD": "Show HUD",
+ "showSystemStats": "Show System Stats",
"rectangle": "Rectangle",
"maskFill": "Mask Fill",
"addPositivePrompt": "Add $t(controlLayers.prompt)",
@@ -1942,6 +1943,11 @@
"HUD": {
"bbox": "Bbox",
"scaledBbox": "Scaled Bbox",
+ "cpuUsage": "CPU Usage",
+ "ramUsage": "RAM Usage",
+ "gpuUsage": "GPU Usage",
+ "gpuVram": "GPU VRAM",
+ "gpuTemp": "GPU Temp",
"entityStatus": {
"isFiltering": "{{title}} is filtering",
"isTransforming": "{{title}} is transforming",
diff --git a/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx b/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
index 450613d3a5f..0fa4518c92f 100644
--- a/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
+++ b/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
@@ -1,9 +1,19 @@
-import { Grid } from '@invoke-ai/ui-library';
+import { Divider, Grid } from '@invoke-ai/ui-library';
+import { createSelector } from '@reduxjs/toolkit';
+import { useAppSelector } from 'app/store/storeHooks';
import { CanvasHUDItemBbox } from 'features/controlLayers/components/HUD/CanvasHUDItemBbox';
import { CanvasHUDItemScaledBbox } from 'features/controlLayers/components/HUD/CanvasHUDItemScaledBbox';
+import { CanvasHUDItemStats } from 'features/controlLayers/components/HUD/CanvasHUDItemStats';
+import { selectCanvasSettingsSlice } from 'features/controlLayers/store/canvasSettingsSlice';
import { memo } from 'react';
+const selectCanvasSettings = createSelector(selectCanvasSettingsSlice, (canvasSettings) => ({
+ showSystemStats: canvasSettings.showSystemStats,
+}));
+
export const CanvasHUD = memo(() => {
+ const { showSystemStats } = useAppSelector(selectCanvasSettings);
+
return (