Feat(ui): System Stats, CPU, RAM and GPU Usage #7005

Draft · wants to merge 10 commits into main
8 changes: 8 additions & 0 deletions invokeai/app/api/routers/app_info.py
@@ -13,6 +13,7 @@
from invokeai.app.api.dependencies import ApiDependencies
from invokeai.app.invocations.upscale import ESRGAN_MODELS
from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus
from invokeai.app.services.system_stats.system_stats import SystemStats, get_system_stats
from invokeai.backend.image_util.infill_methods.patchmatch import PatchMatch
from invokeai.backend.util.logging import logging
from invokeai.version import __version__
@@ -182,3 +183,10 @@ async def disable_invocation_cache() -> None:
async def get_invocation_cache_status() -> InvocationCacheStatus:
"""Clears the invocation cache"""
return ApiDependencies.invoker.services.invocation_cache.get_status()


@app_router.get("/system-stats", operation_id="get_system_stats", status_code=200, response_model=SystemStats)
async def get_stats() -> SystemStats:
"""Fetches and returns the system statistics, including CPU, RAM, and GPU stats."""
stats = get_system_stats()
return stats
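
For quick manual verification of the new route, a minimal sketch using requests is below. The host, port, and /api/v1/app prefix are assumptions (a default local install, and the frontend's buildAppInfoUrl helper), so adjust to your setup.

import requests

# Assumed local InvokeAI instance and route prefix; adjust if yours differs.
resp = requests.get("http://127.0.0.1:9090/api/v1/app/system-stats", timeout=5)
resp.raise_for_status()
stats = resp.json()
print(f"CPU: {stats['cpu_usage']}%  RAM: {stats['ram_usage']:.0f} MB")
for gpu in stats.get("gpu_usage") or []:
    print(f"GPU {gpu['id']}: {gpu['load']:.0f}% load, {gpu['memory']:.0f}/{gpu['memory_total']:.0f} MB")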
128 changes: 128 additions & 0 deletions invokeai/app/services/system_stats/system_stats.py
@@ -0,0 +1,128 @@
import platform
import subprocess
from typing import List, Optional

import psutil
from pydantic import BaseModel


class GPUStat(BaseModel):
id: int
load: float
memory: float
memory_total: float
temperature: Optional[float] = None


class SystemStats(BaseModel):
cpu_usage: float
ram_usage: float
gpu_usage: Optional[List[GPUStat]]


# Function to fetch NVIDIA GPU stats (including temperature)
def get_nvidia_stats() -> Optional[List[GPUStat]]:
try:
result = subprocess.run(
[
"nvidia-smi",
"--query-gpu=index,utilization.gpu,memory.used,memory.total,temperature.gpu",
"--format=csv,noheader,nounits",
],
capture_output=True,
text=True,
)
output_lines = result.stdout.splitlines()
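# A typical line of this CSV output (illustrative values) looks like:
#   "0, 37, 4523, 24576, 58"
# i.e. index, utilization.gpu (%), memory.used (MiB), memory.total (MiB), temperature.gpu (°C),
# which is why the split(", ") below expects exactly five fields per line.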

gpu_usage = []
for line in output_lines:
parts = line.split(", ")
if len(parts) == 5:
gpu_id = int(parts[0])
load = float(parts[1])
memory_used = float(parts[2])
memory_total = float(parts[3])
temperature = float(parts[4])
gpu_usage.append(
GPUStat(
id=gpu_id, load=load, memory=memory_used, memory_total=memory_total, temperature=temperature
)
)
return gpu_usage if gpu_usage else None
except Exception as e:
print(f"Error fetching NVIDIA GPU stats: {e}")
return None


# Function to fetch AMD GPU stats (including temperature)
def get_amd_stats() -> Optional[List[GPUStat]]:
try:
result = subprocess.run(["rocm-smi", "--showuse", "--showtemp"], capture_output=True, text=True)
output_lines = result.stdout.splitlines()

gpu_usage = []
for line in output_lines:
if "GPU" in line:
parts = line.split()
if len(parts) >= 5:
gpu_id = int(parts[0])
load = float(parts[1])
memory_used = float(parts[2])
memory_total = float(parts[3])
temperature = float(parts[4])
gpu_usage.append(
GPUStat(
id=gpu_id, load=load, memory=memory_used, memory_total=memory_total, temperature=temperature
)
)
return gpu_usage if gpu_usage else None
except Exception as e:
print(f"Error fetching AMD GPU stats: {e}")
return None


# Function to fetch Mac MPS GPU stats (placeholder, needs someone with Mac knowledge)
def get_mps_stats() -> Optional[List[GPUStat]]:
try:
# Using ioreg to get MPS stats on macOS
result = subprocess.run(["ioreg", "-r", "-d", "AppleGPU"], capture_output=True, text=True)
output_lines = result.stdout.splitlines()

gpu_usage = []
for line in output_lines:
if "AppleGPU" in line:
# Placeholder logic for parsing; needs to be implemented based on actual ioreg output
gpu_id = len(gpu_usage)
load = 60.0
memory_used = 8192
memory_total = 16384
gpu_usage.append(GPUStat(id=gpu_id, load=load, memory=memory_used, memory_total=memory_total))
return gpu_usage if gpu_usage else None
except Exception as e:
print(f"Error fetching MPS GPU stats: {e}")
return None


# Function to fetch system stats (CPU, RAM, GPU, and temperature)
def get_system_stats() -> SystemStats:
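# Note: cpu_percent(interval=1) blocks for roughly one second while psutil samples CPU usage,
# so each call to this function (and therefore each /system-stats request) takes about a second.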
cpu_usage = psutil.cpu_percent(interval=1)
ram_usage = psutil.virtual_memory().used / (1024**2)

gpu_usage = None
system_type = platform.system()

if system_type in ["Windows", "Linux"]:
gpu_usage = get_nvidia_stats()

if gpu_usage is None:
gpu_usage = get_amd_stats()

elif system_type == "Darwin":
gpu_usage = get_mps_stats()

return SystemStats(cpu_usage=cpu_usage, ram_usage=ram_usage, gpu_usage=gpu_usage)


if __name__ == "__main__":
stats = get_system_stats()
print(stats)
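
Not part of this diff: a minimal sketch of how the nvidia-smi CSV parsing could be unit-tested without GPU hardware, assuming pytest's monkeypatch fixture and a stubbed subprocess.run (test name and values are illustrative).

from types import SimpleNamespace

from invokeai.app.services.system_stats import system_stats


def test_get_nvidia_stats_parses_csv(monkeypatch):
    # Stub subprocess.run so no real nvidia-smi binary is needed.
    fake_result = SimpleNamespace(stdout="0, 37, 4523, 24576, 58\n")
    monkeypatch.setattr(system_stats.subprocess, "run", lambda *args, **kwargs: fake_result)

    stats = system_stats.get_nvidia_stats()

    assert stats is not None
    assert stats[0].id == 0
    assert stats[0].load == 37.0
    assert stats[0].memory_total == 24576.0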
6 changes: 6 additions & 0 deletions invokeai/frontend/web/public/locales/en.json
@@ -1682,6 +1682,7 @@
"deletePrompt": "Delete Prompt",
"deleteReferenceImage": "Delete Reference Image",
"showHUD": "Show HUD",
"showSystemStats": "Show System Stats",
"rectangle": "Rectangle",
"maskFill": "Mask Fill",
"addPositivePrompt": "Add $t(controlLayers.prompt)",
@@ -1942,6 +1943,11 @@
"HUD": {
"bbox": "Bbox",
"scaledBbox": "Scaled Bbox",
"cpuUsage": "CPU Usage",
"ramUsage": "RAM Usage",
"gpuUsage": "GPU Usage",
"gpuVram": "GPU VRAM",
"gpuTemp": "GPU Temp",
"entityStatus": {
"isFiltering": "{{title}} is filtering",
"isTransforming": "{{title}} is transforming",
invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUD.tsx
@@ -1,9 +1,19 @@
import { Grid } from '@invoke-ai/ui-library';
import { Divider, Grid } from '@invoke-ai/ui-library';
import { createSelector } from '@reduxjs/toolkit';
import { useAppSelector } from 'app/store/storeHooks';
import { CanvasHUDItemBbox } from 'features/controlLayers/components/HUD/CanvasHUDItemBbox';
import { CanvasHUDItemScaledBbox } from 'features/controlLayers/components/HUD/CanvasHUDItemScaledBbox';
import { CanvasHUDItemStats } from 'features/controlLayers/components/HUD/CanvasHUDItemStats';
import { selectCanvasSettingsSlice } from 'features/controlLayers/store/canvasSettingsSlice';
import { memo } from 'react';

const selectCanvasSettings = createSelector(selectCanvasSettingsSlice, (canvasSettings) => ({
showSystemStats: canvasSettings.showSystemStats,
}));

export const CanvasHUD = memo(() => {
const { showSystemStats } = useAppSelector(selectCanvasSettings);

return (
<Grid
bg="base.900"
@@ -17,6 +27,13 @@ export const CanvasHUD = memo(() => {
>
<CanvasHUDItemBbox />
<CanvasHUDItemScaledBbox />

{showSystemStats && (
<>
<Divider gridColumn="span 2" />
<CanvasHUDItemStats />
</>
)}
</Grid>
);
});
invokeai/frontend/web/src/features/controlLayers/components/HUD/CanvasHUDItemStats.tsx
@@ -0,0 +1,37 @@
import { CanvasHUDItem } from 'features/controlLayers/components/HUD/CanvasHUDItem';
import { Fragment, memo } from 'react';
import { useTranslation } from 'react-i18next';
import { useGetSystemStatsQuery } from 'services/api/endpoints/appInfo';

export const CanvasHUDItemStats = memo(() => {
const { t } = useTranslation();

// Fetch system stats with polling every 1 second
const { data: systemStats } = useGetSystemStatsQuery(undefined, {
pollingInterval: 1000,
});

if (!systemStats) {
return null;
}

return (
<>
{/* Display system stats (CPU, RAM, GPU) */}
<CanvasHUDItem label={t('controlLayers.HUD.cpuUsage')} value={`${systemStats.cpu_usage.toFixed(0)}%`} />
<CanvasHUDItem label={t('controlLayers.HUD.ramUsage')} value={`${systemStats.ram_usage.toFixed(0)} MB`} />

{systemStats.gpu_usage?.map((gpu) => (
<Fragment key={gpu.id}>
<CanvasHUDItem label={t('controlLayers.HUD.gpuUsage')} value={`${gpu.load.toFixed(0)}%`} />
<CanvasHUDItem label={t('controlLayers.HUD.gpuVram')} value={`${gpu.memory} MB`} />
{gpu.temperature !== null && (
<CanvasHUDItem label={t('controlLayers.HUD.gpuTemp')} value={`${gpu.temperature} °C`} />
)}
</Fragment>
))}
</>
);
});

CanvasHUDItemStats.displayName = 'CanvasHUDItemStats';
@@ -1,27 +1,53 @@
import { FormControl, FormLabel, Switch } from '@invoke-ai/ui-library';
import { createSelector } from '@reduxjs/toolkit';
import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
import { selectCanvasSettingsSlice, settingsShowHUDToggled } from 'features/controlLayers/store/canvasSettingsSlice';
import {
selectCanvasSettingsSlice,
settingsShowHUDToggled,
settingsShowSystemStatsToggled,
} from 'features/controlLayers/store/canvasSettingsSlice';
import { memo, useCallback } from 'react';
import { useTranslation } from 'react-i18next';

const selectShowHUD = createSelector(selectCanvasSettingsSlice, (canvasSettings) => canvasSettings.showHUD);
const selectShowSystemStats = createSelector(
selectCanvasSettingsSlice,
(canvasSettings) => canvasSettings.showSystemStats
);

export const CanvasSettingsShowHUDSwitch = memo(() => {
const { t } = useTranslation();
const dispatch = useAppDispatch();
const showHUD = useAppSelector(selectShowHUD);
const onChange = useCallback(() => {
const showSystemStats = useAppSelector(selectShowSystemStats);

const onToggleHUD = useCallback(() => {
dispatch(settingsShowHUDToggled());
}, [dispatch]);

const onToggleSystemStats = useCallback(() => {
dispatch(settingsShowSystemStatsToggled());
}, [dispatch]);

return (
<FormControl>
<FormLabel m={0} flexGrow={1}>
{t('controlLayers.showHUD')}
</FormLabel>
<Switch size="sm" isChecked={showHUD} onChange={onChange} />
</FormControl>
<div>
<FormControl>
<FormLabel m={0} flexGrow={1}>
{t('controlLayers.showHUD')}
</FormLabel>
<Switch size="sm" isChecked={showHUD} onChange={onToggleHUD} />
</FormControl>

{/* Show the System Stats toggle only if Show HUD is enabled */}
{showHUD && (
<FormControl mt={2}>
<FormLabel m={0} flexGrow={1}>
{t('controlLayers.showSystemStats')}
</FormLabel>
<Switch size="sm" isChecked={showSystemStats} onChange={onToggleSystemStats} />
</FormControl>
)}
</div>
);
});

invokeai/frontend/web/src/features/controlLayers/store/canvasSettingsSlice.ts
@@ -79,10 +79,12 @@ type CanvasSettingsState = {
* Whether to use pressure sensitivity for the brush and eraser tool when a pen device is used.
*/
pressureSensitivity: boolean;
showSystemStats: boolean;
};

const initialState: CanvasSettingsState = {
showHUD: true,
showSystemStats: false,
clipToBbox: false,
dynamicGrid: false,
brushWidth: 50,
@@ -114,6 +116,9 @@ export const canvasSettingsSlice = createSlice({
settingsShowHUDToggled: (state) => {
state.showHUD = !state.showHUD;
},
settingsShowSystemStatsToggled: (state) => {
state.showSystemStats = !state.showSystemStats;
},
settingsBrushWidthChanged: (state, action: PayloadAction<number>) => {
state.brushWidth = Math.round(action.payload);
},
@@ -185,6 +190,7 @@ export const {
settingsIsolatedStagingPreviewToggled,
settingsIsolatedLayerPreviewToggled,
settingsPressureSensitivityToggled,
settingsShowSystemStatsToggled,
} = canvasSettingsSlice.actions;

/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
11 changes: 10 additions & 1 deletion invokeai/frontend/web/src/services/api/endpoints/appInfo.ts
@@ -1,7 +1,7 @@
import { $openAPISchemaUrl } from 'app/store/nanostores/openAPISchemaUrl';
import type { OpenAPIV3_1 } from 'openapi-types';
import type { paths } from 'services/api/schema';
import type { AppConfig, AppDependencyVersions, AppVersion } from 'services/api/types';
import type { AppConfig, AppDependencyVersions, AppVersion, SystemStats } from 'services/api/types';

import { api, buildV1Url } from '..';

@@ -75,6 +75,14 @@ export const appInfoApi = api.injectEndpoints({
},
providesTags: ['Schema'],
}),

getSystemStats: build.query<SystemStats, void>({
query: () => ({
url: buildAppInfoUrl('system-stats'),
method: 'GET',
}),
providesTags: ['FetchOnReconnect'],
}),
}),
});

@@ -88,4 +96,5 @@ export const {
useGetInvocationCacheStatusQuery,
useGetOpenAPISchemaQuery,
useLazyGetOpenAPISchemaQuery,
useGetSystemStatsQuery,
} = appInfoApi;
15 changes: 15 additions & 0 deletions invokeai/frontend/web/src/services/api/types.ts
@@ -242,6 +242,21 @@ export type PostUploadAction =
| UpscaleInitialImageAction
| ReplaceLayerWithImagePostUploadAction;

// System Stats
interface GPUStat {
id: number;
load: number;
memory: number;
memory_total: number;
temperature: number | null;
}

export interface SystemStats {
cpu_usage: number;
ram_usage: number;
gpu_usage: GPUStat[] | null;
}

export type BoardRecordOrderBy = S['BoardRecordOrderBy'];
export type StarterModel = S['StarterModel'];
