diff --git a/invokeai/app/api/routers/app_info.py b/invokeai/app/api/routers/app_info.py
index 53249363d4e..75ec6e0623d 100644
--- a/invokeai/app/api/routers/app_info.py
+++ b/invokeai/app/api/routers/app_info.py
@@ -13,6 +13,7 @@
 from invokeai.app.api.dependencies import ApiDependencies
 from invokeai.app.invocations.upscale import ESRGAN_MODELS
 from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus
+from invokeai.app.services.system_stats.system_stats import SystemStats, get_system_stats
 from invokeai.backend.image_util.infill_methods.patchmatch import PatchMatch
 from invokeai.backend.util.logging import logging
 from invokeai.version import __version__
@@ -182,3 +183,10 @@ async def disable_invocation_cache() -> None:
 async def get_invocation_cache_status() -> InvocationCacheStatus:
     """Clears the invocation cache"""
     return ApiDependencies.invoker.services.invocation_cache.get_status()
+
+
+@app_router.get("/system-stats", operation_id="get_system_stats", status_code=200, response_model=SystemStats)
+async def get_stats() -> SystemStats:
+    """Fetches and returns the system statistics, including CPU, RAM, and GPU stats."""
+    stats = get_system_stats()
+    return stats
diff --git a/invokeai/app/services/system_stats/system_stats.py b/invokeai/app/services/system_stats/system_stats.py
new file mode 100644
index 00000000000..dabcaf3541e
--- /dev/null
+++ b/invokeai/app/services/system_stats/system_stats.py
@@ -0,0 +1,128 @@
+import platform
+import subprocess
+from typing import List, Optional
+
+import psutil
+from pydantic import BaseModel
+
+
+class GPUStat(BaseModel):
+    id: int
+    load: float
+    memory: float
+    memory_total: float
+    temperature: Optional[float]
+
+
+class SystemStats(BaseModel):
+    cpu_usage: float
+    ram_usage: float
+    gpu_usage: Optional[List[GPUStat]]
+
+
+# Function to fetch NVIDIA GPU stats (including temperature)
+def get_nvidia_stats() -> Optional[List[GPUStat]]:
+    try:
+        result = subprocess.run(
+            [
+                "nvidia-smi",
+                "--query-gpu=index,utilization.gpu,memory.used,memory.total,temperature.gpu",
+                "--format=csv,noheader,nounits",
+            ],
+            capture_output=True,
+            text=True,
+        )
+        output_lines = result.stdout.splitlines()
+
+        gpu_usage = []
+        for line in output_lines:
+            parts = line.split(", ")
+            if len(parts) == 5:
+                gpu_id = int(parts[0])
+                load = float(parts[1])
+                memory_used = float(parts[2])
+                memory_total = float(parts[3])
+                temperature = float(parts[4])
+                gpu_usage.append(
+                    GPUStat(
+                        id=gpu_id, load=load, memory=memory_used, memory_total=memory_total, temperature=temperature
+                    )
+                )
+        return gpu_usage if gpu_usage else None
+    except Exception as e:
+        print(f"Error fetching NVIDIA GPU stats: {e}")
+        return None
+
+
+# Function to fetch AMD GPU stats (including temperature)
+def get_amd_stats() -> Optional[List[GPUStat]]:
+    try:
+        result = subprocess.run(["rocm-smi", "--showuse", "--showtemp"], capture_output=True, text=True)
+        output_lines = result.stdout.splitlines()
+
+        gpu_usage = []
+        for line in output_lines:
+            if "GPU" in line:
+                parts = line.split()
+                if len(parts) >= 5:
+                    gpu_id = int(parts[0])
+                    load = float(parts[1])
+                    memory_used = float(parts[2])
+                    memory_total = float(parts[3])
+                    temperature = float(parts[4])
+                    gpu_usage.append(
+                        GPUStat(
+                            id=gpu_id, load=load, memory=memory_used, memory_total=memory_total, temperature=temperature
+                        )
+                    )
+        return gpu_usage if gpu_usage else None
+    except Exception as e:
+        print(f"Error fetching AMD GPU stats: {e}")
+        return None
+
+
+# Function to fetch Mac MPS GPU stats (placeholder, needs someone with Mac knowledge)
+def get_mps_stats() -> Optional[List[GPUStat]]:
+    try:
+        # Using ioreg to get MPS stats on macOS
+        result = subprocess.run(["ioreg", "-r", "-d", "AppleGPU"], capture_output=True, text=True)
+        output_lines = result.stdout.splitlines()
+
+        gpu_usage = []
+        for line in output_lines:
+            if "AppleGPU" in line:
+                # Placeholder logic for parsing; needs to be implemented based on actual ioreg output
+                gpu_id = len(gpu_usage)
+                load = 60.0
+                memory_used = 8192
+                memory_total = 16384
+                gpu_usage.append(GPUStat(id=gpu_id, load=load, memory=memory_used, memory_total=memory_total))
+        return gpu_usage if gpu_usage else None
+    except Exception as e:
+        print(f"Error fetching MPS GPU stats: {e}")
+        return None
+
+
+# Function to fetch system stats (CPU, RAM, GPU, and temperature)
+def get_system_stats() -> SystemStats:
+    cpu_usage = psutil.cpu_percent(interval=1)
+    ram_usage = psutil.virtual_memory().used / (1024**2)
+
+    gpu_usage = None
+    system_type = platform.system()
+
+    if system_type in ["Windows", "Linux"]:
+        gpu_usage = get_nvidia_stats()
+
+        if gpu_usage is None:
+            gpu_usage = get_amd_stats()
+
+    elif system_type == "Darwin":
+        gpu_usage = get_mps_stats()
+
+    return SystemStats(cpu_usage=cpu_usage, ram_usage=ram_usage, gpu_usage=gpu_usage)
+
+
+if __name__ == "__main__":
+    stats = get_system_stats()
+    print(stats)
diff --git a/invokeai/frontend/web/public/locales/en.json b/invokeai/frontend/web/public/locales/en.json
index 4a01f36b6f5..7a6cbc60e12 100644
--- a/invokeai/frontend/web/public/locales/en.json
+++ b/invokeai/frontend/web/public/locales/en.json
@@ -1682,6 +1682,7 @@
     "deletePrompt": "Delete Prompt",
     "deleteReferenceImage": "Delete Reference Image",
     "showHUD": "Show HUD",
+    "showSystemStats": "Show System Stats",
     "rectangle": "Rectangle",
     "maskFill": "Mask Fill",
     "addPositivePrompt": "Add $t(controlLayers.prompt)",
@@ -1942,6 +1943,11 @@
     "HUD": {
       "bbox": "Bbox",
       "scaledBbox": "Scaled Bbox",
+      "cpuUsage": "CPU Usage",
+      "ramUsage": "RAM Usage",
+      "gpuUsage": "GPU Usage",
+      "gpuVram": "GPU VRAM",
+      "gpuTemp": "GPU Temp",
       "entityStatus": {
         "isFiltering": "{{title}} is filtering",
         "isTransforming": "{{title}} is transforming",
diff --git a/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx b/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
index 450613d3a5f..0fa4518c92f 100644
--- a/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
+++ b/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
@@ -1,9 +1,19 @@
-import { Grid } from '@invoke-ai/ui-library';
+import { Divider, Grid } from '@invoke-ai/ui-library';
+import { createSelector } from '@reduxjs/toolkit';
+import { useAppSelector } from 'app/store/storeHooks';
 import { CanvasHUDItemBbox } from 'features/controlLayers/components/HUD/CanvasHUDItemBbox';
 import { CanvasHUDItemScaledBbox } from 'features/controlLayers/components/HUD/CanvasHUDItemScaledBbox';
+import { CanvasHUDItemStats } from 'features/controlLayers/components/HUD/CanvasHUDItemStats';
+import { selectCanvasSettingsSlice } from 'features/controlLayers/store/canvasSettingsSlice';
 import { memo } from 'react';
 
+const selectCanvasSettings = createSelector(selectCanvasSettingsSlice, (canvasSettings) => ({
+  showSystemStats: canvasSettings.showSystemStats,
+}));
+
 export const CanvasHUD = memo(() => {
+  const { showSystemStats } = useAppSelector(selectCanvasSettings);
+
   return (
     <Grid bg="base.900" borderBottomEndRadius="base" p={2} gap={2} borderTopEndRadius="base" opacity={0.6}>
       <CanvasHUDItemBbox />
       <CanvasHUDItemScaledBbox />
+
+      {showSystemStats && (
+        <>
+          <Divider />
+          <CanvasHUDItemStats />
+        </>
+      )}
     </Grid>
   );
 });
diff --git a/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUDItemStats.tsx b/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUDItemStats.tsx
new file mode 100644
index 00000000000..34f776fff26
--- /dev/null
+++ b/invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUDItemStats.tsx
@@ -0,0 +1,37 @@
+import { CanvasHUDItem } from 'features/controlLayers/components/HUD/CanvasHUDItem';
+import { Fragment, memo } from 'react';
+import { useTranslation } from 'react-i18next';
+import { useGetSystemStatsQuery } from 'services/api/endpoints/appInfo';
+
+export const CanvasHUDItemStats = memo(() => {
+  const { t } = useTranslation();
+
+  // Fetch system stats with polling every 1 second
+  const { data: systemStats } = useGetSystemStatsQuery(undefined, {
+    pollingInterval: 1000,
+  });
+
+  if (!systemStats) {
+    return null;
+  }
+
+  return (
+    <>
+      {/* Display system stats (CPU, RAM, GPU) */}
+      <CanvasHUDItem label={t('controlLayers.HUD.cpuUsage')} value={`${systemStats.cpu_usage.toFixed(1)}%`} />
+      <CanvasHUDItem label={t('controlLayers.HUD.ramUsage')} value={`${systemStats.ram_usage.toFixed(0)} MB`} />
+
+      {systemStats.gpu_usage?.map((gpu) => (
+        <Fragment key={gpu.id}>
+          <CanvasHUDItem label={t('controlLayers.HUD.gpuUsage')} value={`${gpu.load.toFixed(1)}%`} />
+          <CanvasHUDItem label={t('controlLayers.HUD.gpuVram')} value={`${gpu.memory} / ${gpu.memory_total} MB`} />
+          {gpu.temperature !== undefined && (
+            <CanvasHUDItem label={t('controlLayers.HUD.gpuTemp')} value={`${gpu.temperature}°C`} />
+          )}
+        </Fragment>
+      ))}
+    </>
+  );
+});
+
+CanvasHUDItemStats.displayName = 'CanvasHUDItemStats';
diff --git a/invokeai/frontend/web/src/features/controlLayers/components/Settings/CanvasSettingsShowHUDSwitch.tsx b/invokeai/frontend/web/src/features/controlLayers/components/Settings/CanvasSettingsShowHUDSwitch.tsx
index e570e0019e5..bd8c4e306d4 100644
--- a/invokeai/frontend/web/src/features/controlLayers/components/Settings/CanvasSettingsShowHUDSwitch.tsx
+++ b/invokeai/frontend/web/src/features/controlLayers/components/Settings/CanvasSettingsShowHUDSwitch.tsx
@@ -1,27 +1,53 @@
 import { FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
 import { createSelector } from '@reduxjs/toolkit';
 import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
-import { selectCanvasSettingsSlice, settingsShowHUDToggled } from 'features/controlLayers/store/canvasSettingsSlice';
+import {
+  selectCanvasSettingsSlice,
+  settingsShowHUDToggled,
+  settingsShowSystemStatsToggled,
+} from 'features/controlLayers/store/canvasSettingsSlice';
 import { memo, useCallback } from 'react';
 import { useTranslation } from 'react-i18next';
 
 const selectShowHUD = createSelector(selectCanvasSettingsSlice, (canvasSettings) => canvasSettings.showHUD);
+const selectShowSystemStats = createSelector(
+  selectCanvasSettingsSlice,
+  (canvasSettings) => canvasSettings.showSystemStats
+);
 
 export const CanvasSettingsShowHUDSwitch = memo(() => {
   const { t } = useTranslation();
   const dispatch = useAppDispatch();
   const showHUD = useAppSelector(selectShowHUD);
-  const onChange = useCallback(() => {
+  const showSystemStats = useAppSelector(selectShowSystemStats);
+
+  const onToggleHUD = useCallback(() => {
     dispatch(settingsShowHUDToggled());
   }, [dispatch]);
 
+  const onToggleSystemStats = useCallback(() => {
+    dispatch(settingsShowSystemStatsToggled());
+  }, [dispatch]);
+
   return (
-    <FormControl w="full">
-      <FormLabel m={0} flexGrow={1}>
-        {t('controlLayers.showHUD')}
-      </FormLabel>
-      <Switch size="sm" isChecked={showHUD} onChange={onChange} />
-    </FormControl>
+    <>
+      <FormControl w="full">
+        <FormLabel m={0} flexGrow={1}>
+          {t('controlLayers.showHUD')}
+        </FormLabel>
+        <Switch size="sm" isChecked={showHUD} onChange={onToggleHUD} />
+      </FormControl>
+
+      {/* Show the System Stats toggle only if Show HUD is enabled */}
+      {showHUD && (
+        <FormControl w="full">
+          <FormLabel m={0} flexGrow={1}>
+            {t('controlLayers.showSystemStats')}
+          </FormLabel>
+          <Switch size="sm" isChecked={showSystemStats} onChange={onToggleSystemStats} />
+        </FormControl>
+      )}
+    </>
   );
 });
 
diff --git a/invokeai/frontend/web/src/features/controlLayers/store/canvasSettingsSlice.ts b/invokeai/frontend/web/src/features/controlLayers/store/canvasSettingsSlice.ts
index d5d55876256..196a34724b3 100644
--- a/invokeai/frontend/web/src/features/controlLayers/store/canvasSettingsSlice.ts
+++ b/invokeai/frontend/web/src/features/controlLayers/store/canvasSettingsSlice.ts
@@ -79,10 +79,12 @@ type CanvasSettingsState = {
   /**
    * Whether to use pressure sensitivity for the brush and eraser tool when a pen device is used.
    */
   pressureSensitivity: boolean;
+  showSystemStats: boolean;
 };
 const initialState: CanvasSettingsState = {
   showHUD: true,
+  showSystemStats: false,
   clipToBbox: false,
   dynamicGrid: false,
   brushWidth: 50,
@@ -114,6 +116,9 @@ export const canvasSettingsSlice = createSlice({
     settingsShowHUDToggled: (state) => {
       state.showHUD = !state.showHUD;
     },
+    settingsShowSystemStatsToggled: (state) => {
+      state.showSystemStats = !state.showSystemStats;
+    },
     settingsBrushWidthChanged: (state, action: PayloadAction<number>) => {
       state.brushWidth = Math.round(action.payload);
     },
@@ -185,6 +190,7 @@ export const {
   settingsIsolatedStagingPreviewToggled,
   settingsIsolatedLayerPreviewToggled,
   settingsPressureSensitivityToggled,
+  settingsShowSystemStatsToggled,
 } = canvasSettingsSlice.actions;
 
 /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
diff --git a/invokeai/frontend/web/src/services/api/endpoints/appInfo.ts b/invokeai/frontend/web/src/services/api/endpoints/appInfo.ts
index a7efaafcc82..bcc607b3996 100644
--- a/invokeai/frontend/web/src/services/api/endpoints/appInfo.ts
+++ b/invokeai/frontend/web/src/services/api/endpoints/appInfo.ts
@@ -1,7 +1,7 @@
 import { $openAPISchemaUrl } from 'app/store/nanostores/openAPISchemaUrl';
 import type { OpenAPIV3_1 } from 'openapi-types';
 import type { paths } from 'services/api/schema';
-import type { AppConfig, AppDependencyVersions, AppVersion } from 'services/api/types';
+import type { AppConfig, AppDependencyVersions, AppVersion, SystemStats } from 'services/api/types';
 
 import { api, buildV1Url } from '..';
 
@@ -75,6 +75,14 @@ export const appInfoApi = api.injectEndpoints({
       },
       providesTags: ['Schema'],
     }),
+
+    getSystemStats: build.query<SystemStats, void>({
+      query: () => ({
+        url: buildAppInfoUrl('system-stats'),
+        method: 'GET',
+      }),
+      providesTags: ['FetchOnReconnect'],
+    }),
   }),
 });
 
@@ -88,4 +96,5 @@ export const {
   useGetInvocationCacheStatusQuery,
   useGetOpenAPISchemaQuery,
   useLazyGetOpenAPISchemaQuery,
+  useGetSystemStatsQuery,
 } = appInfoApi;
diff --git a/invokeai/frontend/web/src/services/api/types.ts b/invokeai/frontend/web/src/services/api/types.ts
index 5c667190fee..12a5bb3f4b4 100644
--- a/invokeai/frontend/web/src/services/api/types.ts
+++ b/invokeai/frontend/web/src/services/api/types.ts
@@ -242,6 +242,21 @@ export type PostUploadAction =
   | UpscaleInitialImageAction
   | ReplaceLayerWithImagePostUploadAction;
 
+// System Stats
+interface GPUStat {
+  id: number;
+  load: number;
+  memory: number;
+  memory_total: number;
+  temperature: number;
+}
+
+export interface SystemStats {
+  cpu_usage: number;
+  ram_usage: number;
+  gpu_usage: GPUStat[];
+}
+
 export type BoardRecordOrderBy = S['BoardRecordOrderBy'];
 export type StarterModel = S['StarterModel'];
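
Not part of the patch above: a minimal smoke-test sketch for the new route, assuming a local InvokeAI instance on the default port 9090 and the usual /api/v1/app prefix for the app_info routes (both are assumptions, not taken from this diff). It fetches the endpoint once and prints the fields defined by SystemStats/GPUStat.

# Hypothetical manual check of the /system-stats route added above.
# Assumes a local server at http://127.0.0.1:9090 and the /api/v1/app prefix.
import requests

resp = requests.get("http://127.0.0.1:9090/api/v1/app/system-stats", timeout=10)
resp.raise_for_status()
stats = resp.json()

# cpu_usage is a percentage; ram_usage and the GPU memory figures are reported in MB.
print(f"CPU: {stats['cpu_usage']:.1f}%  RAM: {stats['ram_usage']:.0f} MB")
for gpu in stats.get("gpu_usage") or []:
    temp = gpu.get("temperature")
    print(f"GPU {gpu['id']}: {gpu['load']:.1f}% load, {gpu['memory']:.0f}/{gpu['memory_total']:.0f} MB, temp {temp} C")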