From d17bf6a04a0aa5287bfd7e820d2b342898483023 Mon Sep 17 00:00:00 2001
From: Vali98
Date: Wed, 27 Nov 2024 17:20:01 +0800
Subject: [PATCH] fix: crash on startup due to importing invalid model,
 causing verifyModelList to keep loading incompatible file

---
 app/components/ModelManager/ModelItem.tsx | 49 +++++++++++-------
 app/constants/LlamaLocal.ts               | 61 +++++++++++++++++++++--
 2 files changed, 87 insertions(+), 23 deletions(-)

diff --git a/app/components/ModelManager/ModelItem.tsx b/app/components/ModelManager/ModelItem.tsx
index ee10c85..afe0f04 100644
--- a/app/components/ModelManager/ModelItem.tsx
+++ b/app/components/ModelManager/ModelItem.tsx
@@ -34,17 +34,18 @@ const ModelItem: React.FC = ({
     const [autoLoad, setAutoLoad] = useMMKVObject(Global.LocalModel)
     //@ts-ignore
     const quant: string = item.quantization && GGMLNameMap[item.quantization]
-    const disable = modelLoading || modelImporting || modelId !== undefined
     const disableDelete = modelId === item.id || modelLoading
-    const disableEdit = modelId === item.id || modelLoading
+    const isInvalid = Llama.isInitialEntry(item)
 
     const handleDeleteModel = () => {
         Alert.alert({
             title: 'Delete Model',
             description:
                 `Are you sure you want to delete "${item.name}"?\n\nThis cannot be undone!` +
-                (!item.file_path.startsWith('content')
-                    ? `\n\nThis operation will clear up ${readableFileSize(item.file_size)}`
+                (!isInvalid
+                    ? !item.file_path.startsWith('content')
+                        ? `\n\nThis operation will clear up ${readableFileSize(item.file_size)}`
+                        : '\n\n(This will not delete external model files, just this entry)'
                     : ''),
             buttons: [
                 { label: 'Cancel' },
@@ -59,6 +60,9 @@ const ModelItem: React.FC = ({
         })
     }
 
+    const disable = modelLoading || modelImporting || modelId !== undefined || isInvalid
+    const disableEdit = modelId === item.id || modelLoading || isInvalid
+
     return (
         <View style={styles.modelContainer}>
             <TouchableOpacity
@@ -72,17 +76,26 @@ const ModelItem: React.FC = ({
                 />
             <Text style={styles.title}>{item.name}</Text>
-            <View style={styles.tagContainer}>
-                <Text style={styles.tag}>
-                    {item.params === 'N/A' ? 'No Param Size' : item.params}
-                </Text>
-                <Text style={styles.tag}>{quant}</Text>
-                <Text style={styles.tag}>{readableFileSize(item.file_size)}</Text>
-                <Text style={styles.tag}>
-                    {item.architecture}
-                </Text>
-                <Text style={styles.tag}>
-                    {item.file_path.startsWith('content') ? 'External' : 'Internal'}
-                </Text>
-            </View>
-            <Text style={styles.subtitle}>Context Length: {item.context_length}</Text>
+            {!isInvalid && (
+                <View style={styles.tagContainer}>
+                    <Text style={styles.tag}>
+                        {item.params === 'N/A' ? 'No Param Size' : item.params}
+                    </Text>
+                    <Text style={styles.tag}>{quant}</Text>
+                    <Text style={styles.tag}>{readableFileSize(item.file_size)}</Text>
+                    <Text style={styles.tag}>
+                        {item.architecture}
+                    </Text>
+                    <Text style={styles.tag}>
+                        {item.file_path.startsWith('content') ? 'External' : 'Internal'}
+                    </Text>
+                </View>
+            )}
+            {isInvalid && (
+                <View style={styles.tagContainer}>
+                    <Text style={styles.tag}>Model is Invalid</Text>
+                </View>
+            )}
+            {!isInvalid && (
+                <Text style={styles.subtitle}>Context Length: {item.context_length}</Text>
+            )}
             <Text style={styles.subtitle}>File: {item.file.replace('.gguf', '')}</Text>
diff --git a/app/constants/LlamaLocal.ts b/app/constants/LlamaLocal.ts
--- a/app/constants/LlamaLocal.ts
+++ b/app/constants/LlamaLocal.ts
@@ -375,11 +375,26 @@ export namespace Llama {
         const newdir = `${model_dir}${filename}`
+        const initialModelEntry = {
+            context_length: 0,
+            file: filename,
+            file_path: newdir,
+            name: 'N/A',
+            file_size: 0,
+            params: 'N/A',
+            quantization: '-1',
+            architecture: 'N/A',
+        }
+        const [{ id }, ...rest] = await db
+            .insert(model_data)
+            .values(initialModelEntry)
+            .returning({ id: model_data.id })
+
         try {
             const modelContext = await initLlama({ model: newdir, vocab_only: true })
             const modelInfo: any = modelContext.model
             const modelType = modelInfo.metadata?.['general.architecture']
             const fileInfo = await FS.getInfoAsync(newdir)
             const modelDataEntry = {
-                context_length: modelInfo.metadata?.[modelType + '.context_length'] ?? '0',
+                context_length: modelInfo.metadata?.[modelType + '.context_length'] ?? 0,
                 file: filename,
                 file_path: newdir,
                 name: modelInfo.metadata?.['general.name'] ?? 'N/A',
@@ -390,8 +405,7 @@ export namespace Llama {
         }
         Logger.log(`New Model Data:\n${modelDataText(modelDataEntry)}`)
         await modelContext.release()
-
-        await db.insert(model_data).values(modelDataEntry)
+        await db.update(model_data).set(modelDataEntry).where(eq(model_data.id, id))
         return true
     } catch (e) {
         Logger.log(`Failed to create data: ${e}`, true)
@@ -409,13 +423,29 @@ export namespace Llama {
             Logger.log('Filename invalid, Import Failed', true)
             return
         }
+
+        const initialModelEntry = {
+            context_length: 0,
+            file: filename,
+            file_path: newdir,
+            name: 'N/A',
+            file_size: 0,
+            params: 'N/A',
+            quantization: '-1',
+            architecture: 'N/A',
+        }
+        const [{ id }, ...rest] = await db
+            .insert(model_data)
+            .values(initialModelEntry)
+            .returning({ id: model_data.id })
+
         try {
             const modelContext = await initLlama({ model: newdir, vocab_only: true })
             const modelInfo: any = modelContext.model
             const modelType = modelInfo.metadata?.['general.architecture']
             const fileInfo = await FS.getInfoAsync(newdir)
             const modelDataEntry = {
-                context_length: modelInfo.metadata?.[modelType + '.context_length'] ?? '0',
+                context_length: modelInfo.metadata?.[modelType + '.context_length'] ?? 0,
                 file: filename,
                 file_path: newdir,
                 name: modelInfo.metadata?.['general.name'] ?? 'N/A',
@@ -427,7 +457,7 @@ export namespace Llama {
         Logger.log(`New Model Data:\n${modelDataText(modelDataEntry)}`)
         await modelContext.release()
 
-        await db.insert(model_data).values(modelDataEntry)
+        await db.update(model_data).set(modelDataEntry).where(eq(model_data.id, id))
         return true
     } catch (e) {
         Logger.log(`Failed to create data: ${e}`, true)
@@ -493,6 +523,27 @@ export namespace Llama {
     export const updateName = async (name: string, id: number) => {
         await db.update(model_data).set({ name: name }).where(eq(model_data.id, id))
     }
+
+    export const isInitialEntry = (data: ModelData) => {
+        const initial: ModelData = {
+            file: '',
+            file_path: '',
+            context_length: 0,
+            name: 'N/A',
+            file_size: 0,
+            params: 'N/A',
+            quantization: '-1',
+            architecture: 'N/A',
+        }
+
+        for (const key in initial) {
+            if (key === 'file' || key === 'file_path') continue
+            const initialV = initial[key as keyof ModelData]
+            const dataV = data[key as keyof ModelData]
+            if (initialV !== dataV) return false
+        }
+        return true
+    }
 }
 
 enum GGMLType {
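
Note: the patch works by registering a placeholder row (initialModelEntry) before calling initLlama, then overwriting that row with the real GGUF metadata only once the read succeeds. If initLlama throws or crashes mid-import, the placeholder row survives, Llama.isInitialEntry recognizes it, and ModelItem renders "Model is Invalid" with loading and editing disabled, leaving only Delete available. As a minimal sketch (not code from this patch), a startup pass such as verifyModelList could also prune these rows automatically; pruneInvalidModels is a hypothetical name, and the drizzle db handle, model_data table, and Logger are assumed from the surrounding LlamaLocal.ts module:

    import { eq } from 'drizzle-orm'

    // Hypothetical cleanup pass, e.g. run from verifyModelList on startup:
    // delete any row still matching the initial placeholder, since that
    // means a previous import failed before metadata was written back.
    const pruneInvalidModels = async () => {
        const models = await db.select().from(model_data)
        for (const model of models) {
            if (Llama.isInitialEntry(model)) {
                Logger.log(`Removing invalid model entry: ${model.file}`)
                await db.delete(model_data).where(eq(model_data.id, model.id))
            }
        }
    }

Either way, the key design choice is insert-before/update-after: the database row exists even if reading the GGUF file kills the process, so the bad file surfaces in the UI instead of being silently reprocessed on every launch.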