{{ account.address }}
@@ -53,8 +43,8 @@
diff --git a/src/pages/IndexPage.vue b/src/pages/IndexPage.vue
deleted file mode 100644
index d40f599..0000000
--- a/src/pages/IndexPage.vue
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
-
-
-
-
-
diff --git a/src/pages/KnowledgeBase.vue b/src/pages/KnowledgeBase.vue
index 27ead37..b1d435a 100644
--- a/src/pages/KnowledgeBase.vue
+++ b/src/pages/KnowledgeBase.vue
@@ -16,12 +16,7 @@
-
+
{{ document.title }}
{{ document.description }}
@@ -51,8 +46,8 @@
-->
haha
-
diff --git a/src/router/index.js b/src/router/index.js
index e7424f1..6d25295 100644
--- a/src/router/index.js
+++ b/src/router/index.js
@@ -1,11 +1,6 @@
-import { route } from "quasar/wrappers";
-import {
- createRouter,
- createMemoryHistory,
- createWebHistory,
- createWebHashHistory,
-} from "vue-router";
-import routes from "./routes";
+import { route } from 'quasar/wrappers';
+import { createRouter, createMemoryHistory, createWebHistory, createWebHashHistory } from 'vue-router';
+import routes from './routes';
/*
* If not building with SSR mode, you can
@@ -19,7 +14,7 @@ import routes from "./routes";
export default route(function (/* { store, ssrContext } */) {
const createHistory = process.env.SERVER
? createMemoryHistory
- : process.env.VUE_ROUTER_MODE === "history"
+ : process.env.VUE_ROUTER_MODE === 'history'
? createWebHistory
: createWebHashHistory;
diff --git a/src/router/routes.js b/src/router/routes.js
index d4fed03..caecd1b 100644
--- a/src/router/routes.js
+++ b/src/router/routes.js
@@ -1,32 +1,31 @@
const routes = [
{
- path: "/",
- component: () => import("layouts/MainLayout.vue"),
+ path: '/',
+ component: () => import('layouts/MainLayout.vue'),
children: [
- // { path: '', component: () => import('pages/IndexPage.vue') },
- { path: "", redirect: "/new" },
- { path: "vocal", component: () => import("pages/VocalChat.vue") },
+ { path: '', redirect: '/new' },
+ { path: 'vocal', component: () => import('pages/VocalChat.vue') },
{
- path: "new",
- name: "new-chat",
- component: () => import("pages/NewChat.vue"),
+ path: 'new',
+ name: 'new-chat',
+ component: () => import('pages/NewChat.vue'),
},
// chat view with chat id
{
- path: "chat/:id",
- name: "chat",
- component: () => import("pages/Chat.vue"),
+ path: 'chat/:id',
+ name: 'chat',
+ component: () => import('pages/Chat.vue'),
},
- { path: "points", component: () => import("src/pages/PointsInfo.vue") },
+ { path: 'points', component: () => import('src/pages/PointsInfo.vue') },
{
- path: "points/:address",
- name: "points-detail",
- component: () => import("src/pages/PointsDetail.vue"),
+ path: 'points/:address',
+ name: 'points-detail',
+ component: () => import('src/pages/PointsDetail.vue'),
},
// knowledge db
{
- path: "knowledge-db",
- component: () => import("src/pages/KnowledgeBase.vue"),
+ path: 'knowledge-db',
+ component: () => import('src/pages/KnowledgeBase.vue'),
},
],
},
@@ -34,8 +33,8 @@ const routes = [
// Always leave this as last one,
// but you can also remove it
{
- path: "/:catchAll(.*)*",
- component: () => import("pages/ErrorNotFound.vue"),
+ path: '/:catchAll(.*)*',
+ component: () => import('pages/ErrorNotFound.vue'),
},
];
diff --git a/src/stores/account.js b/src/stores/account.js
index 271e5b0..0175e54 100644
--- a/src/stores/account.js
+++ b/src/stores/account.js
@@ -1,12 +1,12 @@
-import { defineStore } from "pinia";
-import { ethers } from "ethers";
+import { defineStore } from 'pinia';
+import { ethers } from 'ethers';
-export const useAccount = defineStore("account", {
+export const useAccount = defineStore('account', {
state: () => ({
active: false,
provider: null,
signer: null,
- address: "",
+ address: '',
}),
getters: {
alephAccount() {
@@ -15,8 +15,8 @@ export const useAccount = defineStore("account", {
mnemonics: null,
address: this.address,
name: this.address,
- type: "ETH",
- source: "provider",
+ type: 'ETH',
+ source: 'provider',
provider: this.provider,
signer: this.signer,
};
@@ -28,12 +28,12 @@ export const useAccount = defineStore("account", {
// you can directly mutate the state
this.provider = provider;
console.log(provider.provider);
- provider.provider.on("accountsChanged", async (accounts) => {
- console.log("accountsChanged", accounts);
+ provider.provider.on('accountsChanged', async (accounts) => {
+ console.log('accountsChanged', accounts);
this.address = ethers.utils.getAddress(accounts[0]);
this.signer = await this.provider.getSigner();
});
- console.log("provider", provider);
+ console.log('provider', provider);
this.signer = await this.provider.getSigner();
this.address = await this.signer.getAddress();
this.active = true;
@@ -42,7 +42,7 @@ export const useAccount = defineStore("account", {
this.active = false;
this.provider = null;
this.signer = null;
- this.address = "";
+ this.address = '';
},
},
});
diff --git a/src/stores/chats-store.js b/src/stores/chats-store.js
new file mode 100644
index 0000000..2973b76
--- /dev/null
+++ b/src/stores/chats-store.js
@@ -0,0 +1,364 @@
+import { v4 as uuidv4 } from 'uuid';
+import { defineStore } from 'pinia';
+
+import { defaultModels } from 'src/utils/models';
+import * as idb from 'src/utils/idb';
+
+const CHATS_STORE_NAME = 'chats-store';
+const CHATS_STORE_PINIA_KEY = 'chats-store-pinia-key';
+
+/**
+ * Representation of a single chat:
+ *
+ * interface Chat {
+ * id: string;
+ * title: string;
+ * username: string;
+ *
+ * // From @libertai/libertai-js
+ * model: Model;
+ * persona: Persona;
+ * messages: Message[];
+ * }
+ */
+
+export const useChatsStore = defineStore(CHATS_STORE_PINIA_KEY, {
+ state: () => ({
+ // Interface for our ChatsStore
+ chatsStore: new ChatsStore(),
+ // List of partials chats of the form { id, title }
+ chats: [],
+ }),
+ getters: {},
+ actions: {
+ /**
+ * Load the chats from persistent storage and update the models
+ * @async
+ * @returns {Promise}
+ */
+ async load() {
+ // Update the models for all chats
+ await this.chatsStore.updateModels(defaultModels);
+ // Get the partial chats of the form { id, title }
+ let chats = await this.chatsStore.readChats();
+ this.chats = chats;
+ return;
+ },
+
+ /**
+ * Read a chat by its id
+ * @async
+ * @param {string} id - the id of the chat
+ * @returns {Promise} - the chat or null if not found
+ * @throws {Error} - if the chat is not found
+ */
+ async readChat(id) {
+ const chat = await this.chatsStore.readChat(id);
+ return chat;
+ },
+
+ /**
+ * Create a new chat
+ * @async
+ * @param {string} title - the title of the chat
+ * @param {string} username - the username of the user
+ * @param {Model} model - the model to use for the chat
+ * @param {Persona} persona - the persona to use for the chat
+ * @returns {Promise} - the created chat
+ */
+ async createChat(title, username, model, persona) {
+ const chat = await this.chatsStore.createChat(title, username, model, persona);
+ this.chats.push(chat);
+ return chat;
+ },
+
+ /**
+ * Update the title of a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {string} title - the new title of the chat
+ * @returns {Promise}
+ * @throws {Error} - if the chat is not found
+ */
+ async updateChatTitle(chatId, title) {
+ await this.chatsStore.updateChat(chatId, { title });
+ // Update the partial chats
+ this.chats = this.chats.map((chat) => {
+ if (chat.id === chatId) {
+ chat.title = title;
+ }
+ return chat;
+ });
+ },
+
+ /**
+ * Update the model of a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {Model} model - the new model of the chat
+ * @returns {Promise}
+ * @throws {Error} - if the chat is not found
+ */
+ async updateChatModel(chatId, model) {
+ await this.chatsStore.updateChat(chatId, { model });
+ // Update the partial chats
+ this.chats = this.chats.map((chat) => {
+ if (chat.id === chatId) {
+ chat.model = model;
+ }
+ return chat;
+ });
+ },
+
+ /**
+ * Update chat message content
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {number} messageIndex - the index of the message
+ * @param {string} content - the new content of the message
+ * @returns {Promise}
+ * @throws {Error} - if the chat is not found
+ */
+ async updateChatMessageContent(chatId, messageIndex, content) {
+ let chat = await this.chatsStore.readChat(chatId);
+ let messages = chat.messages;
+ messages[messageIndex].content = content;
+ await this.chatsStore.updateChat(chatId, { messages });
+ },
+
+ /**
+ * Pop the last message from a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @returns {Promise}
+ */
+ async popChatMessages(chatId) {
+ return await this.chatsStore.popChatMessages(chatId);
+ },
+
+ /**
+ * append a user message to a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {string} message - the content of the message
+ * @returns {Promise} - the created message
+ */
+ async appendUserMessage(chatId, message) {
+ return await this.chatsStore.appendUserMessage(chatId, message);
+ },
+
+ /**
+ * append a model response to a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {string} response - the content of the response
+ * @returns {Promise} - the created message
+ */
+ async appendModelResponse(chatId, response) {
+ return await this.chatsStore.appendModelResponse(chatId, response);
+ },
+
+ /**
+ * Delete a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @returns {Promise}
+ */
+ async deleteChat(chatId) {
+ await this.chatsStore.deleteChat(chatId);
+ this.chats = this.chats.filter((c) => c.id !== chatId);
+ },
+ },
+});
+
+class ChatsStore {
+ constructor() {
+ // Initialize the localforage store
+ this.store = idb.createStore(CHATS_STORE_NAME);
+
+ this.createChat = this.createChat.bind(this);
+ this.readChats = this.readChats.bind(this);
+ this.readChat = this.readChat.bind(this);
+ this.updateChat = this.updateChat.bind(this);
+ this.popChatMessages = this.popChatMessages.bind(this);
+ this.appendUserMessage = this.appendUserMessage.bind(this);
+ this.appendModelResponse = this.appendModelResponse.bind(this);
+ }
+
+ /**
+ * Update the models for all of our chats if one of them has been updated.
+ * Searches for the model by apiUrl and updates the chat model if it has changed.
+ * @async
+ * @param {Model[]} - list of up-to-date models
+ * @returns {Promise}
+ */
+ async updateModels(upToDateModels) {
+ // Create an array for the updates we will resolve
+ const updatedChats = [];
+
+ // Iterate over all chats and update the model if necessary
+ await this.store.iterate((value, _key, _iterationNumber) => {
+ // Find the chat and model
+ const chat = value;
+ const apiUrl = chat.model.apiUrl;
+ const upToDateModel = upToDateModels.find((m) => m.apiUrl === apiUrl);
+
+ // Determine if the model has changed
+ let changed = false;
+ if (upToDateModel) {
+ // Do a deep comparison of the models
+ if (upToDateModel !== chat.model) {
+ chat.model = upToDateModel;
+ changed = true;
+ }
+ } else {
+ chat.model = upToDateModel;
+ changed = true;
+ }
+
+ // If the model has changed, update the chat
+ if (changed) {
+ updatedChats.push(idb.put(chat.id, chat, this.store));
+ }
+ });
+
+ // Resolve all the updates
+ await Promise.all(updatedChats);
+
+ // Done!
+ return;
+ }
+
+ /**
+ * Create a new chat
+ * @async
+ * @param {string} title - the title of the chat
+ * @param {string} username - the username of the user
+ * @param {Model} model - the model to use for the chat
+ * @param {Persona} persona - the persona to use for the chat
+ * @returns {Promise} - the created chat
+ */
+ async createChat(title, username, model, persona) {
+ const id = uuidv4();
+ const chat = {
+ id,
+ title,
+ username,
+ model,
+ persona,
+ messages: [],
+ };
+ return await idb.put(chat.id, chat, this.store);
+ }
+
+ /**
+ * Read all chats from the store as of the form { id, title }
+ * @async
+ * @returns {Promise[]>} - list of partial chats
+ */
+ async readChats() {
+ const result = [];
+ await this.store.iterate((value, _key) => {
+ const chat = value;
+ const partialChat = {
+ id: chat.id,
+ title: chat.title,
+ };
+ result.push(partialChat);
+ });
+ return result;
+ }
+
+ /**
+ * Read a chat from the store by its id
+ * @async
+ * @param {string} id - the id of the chat
+ * @returns {Promise} - the chat
+ * @throws {Error} - if the chat is not found
+ */
+ async readChat(id) {
+ let chat = idb.get(id, this.store);
+ if (!chat) {
+ throw new Error('Chat not found');
+ }
+ return chat;
+ }
+
+ /**
+ * Update a chat with a partial data
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {Partial} chat - the partial chat data
+ * @returns {Promise}
+ * @throws {Error} - if the chat is not found
+ */
+ async updateChat(chatId, chat) {
+ const fullChat = await this.readChat(chatId);
+ // TODO: I should probably validate the chat data here
+ const updatedChat = { ...fullChat, ...chat };
+ await idb.put(chatId, updatedChat, this.store);
+ }
+
+ /**
+ * Pop the last message from a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @returns {Promise}
+ * @throws {Error} - if the chat is not found
+ */
+ async popChatMessages(chatId) {
+ const chat = await this.readChat(chatId);
+ chat.messages.pop();
+ await idb.put(chatId, chat, this.store);
+ }
+
+ /**
+ * Append a user message to a chat -- a message with the user's role as recorded in the chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {string} messageContent - the content of the message
+ * @returns {Promise} - the created message
+ * @throws {Error} - if the chat is not found
+ */
+ async appendUserMessage(chatId, messageContent) {
+ const chat = await this.readChat(chatId);
+ const message = {
+ role: chat.username,
+ content: messageContent,
+ timestamp: new Date(),
+ };
+ chat.messages.push(message);
+ await idb.put(chatId, chat, this.store);
+ return message;
+ }
+
+ /**
+ * Append a model's response to a chat
+ * @async
+ * @param {string} chatId - the id of the chat
+ * @param {string} responseContent - the content of the response
+ * @returns {Promise} - the created message
+ * @throws {Error} - if the chat is not found
+ */
+ async appendModelResponse(chatId, responseContent) {
+ const chat = await this.readChat(chatId);
+ const message = {
+ role: chat.persona.name,
+ content: responseContent,
+ timestamp: new Date(),
+ };
+ chat.messages.push(message);
+ await idb.put(chatId, chat, this.store);
+ return message;
+ }
+
+ /**
+ * Delete a chat from the store
+ * @async
+ * @param {string} id - the id of the chat
+ * @returns {Promise}
+ */
+ async deleteChat(id) {
+ await idb.rm(id, this.store);
+ }
+}
diff --git a/src/stores/chats.js b/src/stores/chats.js
deleted file mode 100644
index 0d8d1fb..0000000
--- a/src/stores/chats.js
+++ /dev/null
@@ -1,76 +0,0 @@
-import { defineStore } from "pinia";
-
-import models from "../utils/models.js";
-import { v4 as uuidv4 } from "uuid";
-
-export const useChats = defineStore("chats", {
- state: () => ({
- chats: [],
- }),
- getters: {},
- actions: {
- // any amount of arguments, return a promise or not
- setModel(model) {
- // you can directly mutate the state
- this.model = model;
- },
-
- loadFromStorage() {
- let savedChats = localStorage.getItem("assistant-chats");
- if (savedChats) {
- savedChats = JSON.parse(savedChats);
- let model_urls = models.map((m) => m.apiUrl);
- for (let chat of savedChats) {
- if (chat.id == undefined) {
- chat.id = uuidv4();
- }
- chat.unreadCount = 0;
- // check if chat model is in the list of models
- if (!model_urls.includes(chat.model.apiUrl)) {
- // check if there is another model with the same name
- const model = models.find((m) => m.name === chat.model.name);
- if (model) {
- chat.model = model;
- } else {
- // set the default model
- chat.model = models[0];
- }
- } else {
- // update the model to the latest version
- chat.model = models.find((m) => m.apiUrl === chat.model.apiUrl);
- }
- }
- } else {
- savedChats = [];
- }
- // const defaultPrompts = this.prompts;
-
- // TODO: verify models, and that they are still in the list.
- this.chats = savedChats;
- },
-
- saveToStorage() {
- localStorage.setItem("assistant-chats", JSON.stringify(this.chats));
- },
-
- getChat(chat_id) {
- for (let chat of this.chats) {
- if (chat.id == chat_id) return chat;
- }
- },
-
- addChat(chat) {
- this.chats.push(chat);
- this.saveToStorage();
- },
-
- deleteChat(chat) {
- const index = this.chats.indexOf(chat);
- if (index > -1) {
- // only splice array when item is found
- this.chats.splice(index, 1); // 2nd parameter means remove one item only
- this.saveToStorage();
- }
- },
- },
-});
diff --git a/src/stores/example-store.js b/src/stores/counter-store.js
similarity index 65%
rename from src/stores/example-store.js
rename to src/stores/counter-store.js
index 6e68051..83e8390 100644
--- a/src/stores/example-store.js
+++ b/src/stores/counter-store.js
@@ -1,6 +1,6 @@
-import { defineStore } from "pinia";
+import { defineStore } from 'pinia';
-export const useCounterStore = defineStore("counter", {
+export const useCounterStore = defineStore('counter', {
state: () => ({
counter: 0,
}),
diff --git a/src/stores/index.js b/src/stores/index.js
index 6630bd6..6bc65d6 100644
--- a/src/stores/index.js
+++ b/src/stores/index.js
@@ -1,5 +1,5 @@
-import { store } from "quasar/wrappers";
-import { createPinia } from "pinia";
+import { store } from 'quasar/wrappers';
+import { createPinia } from 'pinia';
/*
* If not building with SSR mode, you can
diff --git a/src/stores/knowledge-db.js b/src/stores/knowledge-db.js
deleted file mode 100644
index 764dc01..0000000
--- a/src/stores/knowledge-db.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { defineStore } from "pinia";
-import { KnowledgeDb } from "libertai-js";
-
-export const KNOWLEDGE_DB_PINIA_KEY = "knowledgeDb-pinia-key";
-
-export const useKnowledgeDBStore = defineStore(KNOWLEDGE_DB_PINIA_KEY, {
- state: () => ({
- knowledgeDb: new KnowledgeDb(),
- }),
- actions: {
- documents() {
- return this.knowledgeDb.documents;
- },
-
- async load() {
- return await this.knowledgeDb.load();
- },
- async addDocument(title, content) {
- return await this.knowledgeDb.addDocument(title, content);
- },
- // TODO: probably horribly inefficient, but it works for now
- async searchDocuments(query) {
- return await this.knowledgeDb.searchDocuments(
- query,
- (k = 5),
- (max_distannce = 15),
- );
- },
- },
-});
diff --git a/src/stores/knowledge-store.js b/src/stores/knowledge-store.js
new file mode 100644
index 0000000..d0e9851
--- /dev/null
+++ b/src/stores/knowledge-store.js
@@ -0,0 +1,43 @@
+import { defineStore } from 'pinia';
+import { KnowledgeStore } from '@libertai/libertai-js';
+import { defaultKnowledge } from '../utils/knowledge';
+
+export const KNOWLEDGE_STORE_PINIA_KEY = 'knowledge-store-pinia-key';
+
+export const useKnowledgeStore = defineStore(KNOWLEDGE_STORE_PINIA_KEY, {
+ state: () => ({
+ knowledgeStore: new KnowledgeStore(),
+ }),
+ actions: {
+ documents() {
+ // Return all documents as a list
+ let docs = [];
+ for (const [_key, value] of this.knowledgeStore.documents) {
+ docs.push(value);
+ }
+ return docs;
+ },
+
+ async load() {
+ await this.knowledgeStore.load();
+ let defaultDocumentTitles = defaultKnowledge.map((doc) => doc.title);
+ let documentTitles = this.documents().map((doc) => doc.title);
+ // Check if default documents are already in the store
+ let missingDocuments = defaultDocumentTitles.filter((title) => !documentTitles.includes(title));
+ // Add missing documents
+ let addedDocuments = [];
+ for (let title of missingDocuments) {
+ let doc = defaultKnowledge.find((doc) => doc.title === title);
+ addedDocuments.push(this.addDocument(doc.title, doc.content, doc.tags));
+ }
+ await Promise.all(addedDocuments);
+ return;
+ },
+ async addDocument(title, content, tags = []) {
+ return await this.knowledgeStore.addDocument(title, content, tags);
+ },
+ async searchDocuments(query, tags = []) {
+ return await this.knowledgeStore.searchDocuments(query, 3, 18, tags);
+ },
+ },
+});
diff --git a/src/stores/models-store.js b/src/stores/models-store.js
new file mode 100644
index 0000000..734b351
--- /dev/null
+++ b/src/stores/models-store.js
@@ -0,0 +1,26 @@
+import { defineStore } from 'pinia';
+
+import { defaultModels } from '../utils/models.js';
+
+export const useModelsStore = defineStore('models', {
+ state: () => ({
+ models: defaultModels,
+ selectedModel: defaultModels[0],
+ }),
+ getters: {},
+ actions: {
+ // any amount of arguments, return a promise or not
+ setModel(model) {
+ // you can directly mutate the state
+ this.selectedModel = model;
+ },
+ setModelByURL(modelUrl) {
+ console.log('searching for', modelUrl);
+ for (let model of defaultModels)
+ if (model.apiUrl == modelUrl) {
+ console.log('found', model.name, model.apiUrl);
+ this.selectedModel = model;
+ }
+ },
+ },
+});
diff --git a/src/stores/models.js b/src/stores/models.js
deleted file mode 100644
index cf61745..0000000
--- a/src/stores/models.js
+++ /dev/null
@@ -1,27 +0,0 @@
-import { defineStore } from "pinia";
-
-import models from "../utils/models.js";
-
-export const useModels = defineStore("models", {
- state: () => ({
- models: models,
- model: models[0],
- }),
- getters: {},
- actions: {
- // any amount of arguments, return a promise or not
- setModel(model) {
- // you can directly mutate the state
- this.model = model;
- },
- setModelByURL(modelUrl) {
- console.log("searching for", modelUrl);
- for (let model of models) {
- if (model.apiUrl == modelUrl) {
- console.log("found", model.name, model.apiUrl);
- this.model = model;
- }
- }
- },
- },
-});
diff --git a/src/stores/personas-store.js b/src/stores/personas-store.js
new file mode 100644
index 0000000..9099cb8
--- /dev/null
+++ b/src/stores/personas-store.js
@@ -0,0 +1,17 @@
+import { defineStore } from 'pinia';
+
+import { defaultPersonas } from '../utils/personas.js';
+
+export const usePersonasStore = defineStore('personas', {
+ state: () => ({
+ personas: defaultPersonas,
+ persona: defaultPersonas[0],
+ }),
+ getters: {},
+ actions: {
+ setPersona(persona) {
+ // you can directly mutate the state
+ this.persona = persona;
+ },
+ },
+});
diff --git a/src/stores/points.js b/src/stores/points.js
index 12efb2f..bbd98ce 100644
--- a/src/stores/points.js
+++ b/src/stores/points.js
@@ -1,11 +1,11 @@
-import { defineStore } from "pinia";
+import { defineStore } from 'pinia';
-import { Get } from "aleph-sdk-ts/dist/messages/aggregate";
+import { Get } from 'aleph-sdk-ts/dist/messages/aggregate';
-export const usePoints = defineStore("points", {
+export const usePoints = defineStore('points', {
state: () => ({
- points_source: "0xCBFc3EeC41CBBfCAcc50337d712890C47a14ba99",
- api_server: "https://official.aleph.cloud",
+ points_source: '0xCBFc3EeC41CBBfCAcc50337d712890C47a14ba99',
+ api_server: 'https://official.aleph.cloud',
points: {},
pending_points: {},
info: {
@@ -32,9 +32,9 @@ export const usePoints = defineStore("points", {
this.points = pointsData.points;
this.info = pointsData.info;
this.pending_points = pointsData.pending_points;
- console.log("points", this.points);
- console.log("info", this.info);
- console.log("pending_points", this.pending_points);
+ console.log('points', this.points);
+ console.log('info', this.info);
+ console.log('pending_points', this.pending_points);
},
getAddressPoints(address) {
@@ -66,8 +66,7 @@ export const usePoints = defineStore("points", {
const totalDuration = pendingTime - lastTime;
const currentDuration = currentTime - lastTime;
const hourlyRate = (pendingPoints / totalDuration) * 3600;
- const currentPendingPoints =
- (pendingPoints / totalDuration) * currentDuration;
+ const currentPendingPoints = (pendingPoints / totalDuration) * currentDuration;
return {
hourlyRate: hourlyRate,
pending: Math.max(currentPendingPoints, 0),
diff --git a/src/stores/prompts.js b/src/stores/prompts.js
deleted file mode 100644
index 0161b02..0000000
--- a/src/stores/prompts.js
+++ /dev/null
@@ -1,17 +0,0 @@
-import { defineStore } from "pinia";
-
-import prompts from "../utils/prompts.js";
-
-export const usePrompts = defineStore("prompts", {
- state: () => ({
- prompts: prompts,
- prompt: prompts[0],
- }),
- getters: {},
- actions: {
- setPrompt(prompt) {
- // you can directly mutate the state
- this.prompt = prompt;
- },
- },
-});
diff --git a/src/utils/chat.js b/src/utils/chat.js
index 5261947..7759d6c 100644
--- a/src/utils/chat.js
+++ b/src/utils/chat.js
@@ -1,244 +1,69 @@
-import { findMatches } from "../utils/knowledge";
-import axios from "axios";
-import { v4 as uuidv4 } from "uuid";
-
-const chat_openers_prompt =
- "Summarization of chat first sentence for menu items:\n\n" +
- '### Input: """Hello, can you please write a short hello world code for me?"""\n### Summary:\nHello world\n\n' +
- '### Input: """What is the color of Henry IV\'s white horse?\nI\'m not really sure"""\n### Summary:\nHenry IV\'s horse color\n\n';
-
-export function calculateNumberOfTokens(line) {
- return line.length / 2.7;
-}
-
-async function preparePrompt(messages, activePrompt, model) {
- let chatLog = "";
- let currentTokens = 0;
- let persona_name = activePrompt.users[1].username;
- let user_name = activePrompt.users[0].username;
- let context_document = activePrompt.context_document;
-
- let basePrompt = `${model.base_prompt}${model.persona_start}${activePrompt.persona}`;
- basePrompt = basePrompt.replaceAll("{{user}}", user_name);
- basePrompt = basePrompt.replaceAll("{{char}}", persona_name);
- basePrompt = basePrompt.replaceAll("{{model}}", model.name);
-
- if (context_document) {
- basePrompt = `${basePrompt}\n${model.line_separator}\n${model.log_start}${model.user_prepend}CONTEXT DOCUMENT${model.user_append}${context_document}\n`;
- } else {
- basePrompt = `${basePrompt}\n${model.log_start}`;
- }
-
- const promptcalc = `${basePrompt}\n${model.log_start}\n${model.user_prepend}${persona_name}${model.user_append}`;
- const initialPromptTokens = calculateNumberOfTokens(promptcalc);
- const maxTokens = model.maxTokens - initialPromptTokens;
-
- const chatLogLines = messages.map((msg) => {
- let name = msg.username;
- if (name == activePrompt.users[0].username) {
- name = user_name;
- } else if (name == activePrompt.users[1].username) {
- name = persona_name;
- }
- return `${model.user_prepend}${name}${model.user_append}${msg.content}`;
- });
- const seenInfo = new Set();
- for (let i = chatLogLines.length - 1; i >= 0; i--) {
- const line = chatLogLines[i];
- const lineTokens = calculateNumberOfTokens(line);
-
- // Check for matching knowledge DB entries and add to chat log if not seen before
- const matchedEntries = findMatches(line);
- let infoTokens = 0;
- let infoText = "";
- for (const entry of matchedEntries) {
- if (!seenInfo.has(entry)) {
- const formattedEntry = `### INFO: ${entry}`;
- infoTokens += calculateNumberOfTokens(formattedEntry);
- infoText += `${formattedEntry}\n`;
- seenInfo.add(entry);
- }
- }
-
- // If adding the line won't exceed the token limit, add it to the chat log.
- if (currentTokens + lineTokens + infoTokens <= maxTokens) {
- chatLog = `${model.line_separator}${line}\n${chatLog}`;
- currentTokens += lineTokens;
-
- if (infoText) {
- console.log("adding infotext", infoText);
- chatLog = `${infoText}${chatLog}`;
- currentTokens += infoTokens;
- }
- } else {
- // If adding the line would exceed the token limit, stop the loop.
- break;
- }
- }
- console.log(basePrompt, model.log_start, model);
- return `${basePrompt}\n${chatLog}${model.line_separator}${model.user_prepend}${persona_name}${model.user_append}`;
-}
-
-export async function complete(prompt, model, stop_sequences, handle_cache) {
- // Actually do the completion, calling the engine API
- let params = {
- prompt: prompt,
- temperature: model.temperature,
- top_p: model.top_p,
- top_k: model.top_k,
- min_p: model.min_p,
+import { LlamaCppApiEngine } from '@libertai/libertai-js';
+
+import { modelDefaults, promptFormatDefaults } from './models';
+
+export const defaultChatTopic = 'New Chat';
+
+const chatTopicPromptFormat = {
+ ...promptFormatDefaults,
+ additionalStopSequences: promptFormatDefaults.additionalStopSequences.concat(['\n']),
+};
+
+const chatTopicModel = {
+ ...modelDefaults,
+ apiUrl: 'https://curated.aleph.cloud/vm/a8b6d895cfe757d4bc5db9ba30675b5031fe3189a99a14f13d5210c473220caf/completion',
+ promptFormat: chatTopicPromptFormat,
+};
+
+const chatTopicPersona = {
+ avatarUrl: 'https://this-is-a-fake-url.org',
+ name: 'summary',
+ description:
+ 'You are a summary function provided with input. Provide an at most 5 word summary of the first sentence of the provided input for the purpose of determining menu item names',
+};
+
+const chatTopicExamples = [
+ {
+ role: 'input',
+ content: 'Hello, can you please write a short hello world code for me?',
+ },
+ {
+ role: 'summary',
+ content: 'Hello world',
+ },
+ {
+ role: 'input',
+ content: "What is the color of Henry IV's white horse?\nI'm not really sure",
+ },
+ {
+ role: 'summary',
+ content: "Henry IV's horse color",
+ },
+];
+
+/**
+ * Infer the topic of a chat message using a sample of a text
+ *
+ * @async
+ * @param {string} input - The text to infer the topic from
+ * @returns {string} - The inferred topic
+ * @throws {Error} - If the topic could not be inferred
+ */
+export async function inferChatTopic(input) {
+ const engine = new LlamaCppApiEngine();
+ const message = {
+ role: 'input',
+ content: input,
};
- console.log(model.engine);
- if (model.engine == "kobold") {
- params = {
- ...params,
- n: 1,
- max_context_length: model.maxTokens,
- max_length: model.maxLength,
- rep_pen: 1.08,
- top_a: 0,
- typical: 1,
- tfs: 1,
- rep_pen_range: 1024,
- rep_pen_slope: 0.7,
- sampler_order: model.sampler_order,
- quiet: true,
- stop_sequence: stop_sequences,
- use_default_badwordsids: false,
- };
- } else if (model.engine == "llamacpp") {
- let slot_id = -1;
- if (handle_cache && model.slot_id !== undefined) {
- slot_id = model.slot_id;
- }
- params = {
- ...params,
- n_predict: model.maxLength,
- id_slot: slot_id,
- slot_id: slot_id,
- cache_prompt: handle_cache,
- typical_p: 1,
- tfs_z: 1,
- stop: stop_sequences,
- use_default_badwordsids: false,
- };
- } else if (model.engine == "openai") {
- params = {
- ...params,
- n: 1,
- stop: stop_sequences,
- max_tokens: model.maxLength,
- };
- }
- const response = await axios.post(model.apiUrl, params, {
- withCredentials: model.pass_credentials,
- });
-
- // Parse the response
- if (model.engine == "kobold") {
- console.log(response.data);
- return response.data.results[0].text;
- } else if (model.engine == "llamacpp") {
- if (handle_cache) {
- if (response.data.id_slot !== undefined) {
- model.slot_id = response.data.id_slot;
- } else if (response.data.slot_id !== undefined) {
- model.slot_id = response.data.slot_id;
- }
- }
- model.slot_id = response.data.id_slot;
- return response.data.content;
- } else if (model.engine == "openai") {
- return response.data.choices[0].text;
- }
-}
-
-export async function* generateAnswer(messages, activePrompt, model) {
- console.log(activePrompt, model);
- let user_name = activePrompt.users[0].username;
- let persona_name = activePrompt.users[1].username;
- const prompt = await preparePrompt(messages, activePrompt, model);
- console.log(prompt);
-
- let isUnfinished = true;
- let tries = 0;
- let compoundedResult = "";
- let stop_sequences = [...model.stop_sequences];
- if (stop_sequences.length == 0) {
- stop_sequences = [`${model.user_prepend}${user_name}:`];
- }
- let alternative_stop_sequence = `${model.user_prepend}${persona_name}:`;
- stop_sequences.push(alternative_stop_sequence);
-
- let alternative_stop_sequence_2 = `${user_name}:`;
- stop_sequences.push(alternative_stop_sequence_2);
- while (isUnfinished && tries < model.maxTries) {
- tries += 1;
- const lastResult = await complete(
- prompt + compoundedResult,
- model,
- stop_sequences,
- true,
- );
- const fullResult = compoundedResult + lastResult;
- let results = fullResult;
- console.log(results);
- /// let's refactor this by iterating on stop sequences
- /// results = fullResult.split(`\n${alternative_stop_sequence}`).join("|||||").split(`\n${alternative_stop_sequence_2}`).join("|||||").split(`\n${stop_sequences[0]}`).join("|||||").split("|||||");
- for (let i = 0; i < stop_sequences.length; i++) {
- results = results.split(`\n${stop_sequences[i]}`).join("|||||");
- results = results.split(`${stop_sequences[i]}`).join("|||||");
- }
- results = results.split("|||||");
-
- const firstMessage = results[0].trimEnd();
- compoundedResult = firstMessage;
- let to_yield = compoundedResult;
+ const messages = chatTopicExamples.concat(message);
- if ((results.length > 1) | (lastResult < model.maxLength)) {
- isUnfinished = false;
- } else {
- isUnfinished = true;
- if (tries < model.maxTries) {
- to_yield += " *[writing ...]*";
- }
+ try {
+ for await (const output of engine.generateAnswer(messages, chatTopicModel, chatTopicPersona)) {
+ // Try just returning the first output (since that's what we were doing before we split this out)
+ return output.content.split(chatTopicPromptFormat.lineSeparator)[0];
}
- yield { content: to_yield, unfinished: isUnfinished };
+ } catch (error) {
+ console.error('Error during completion:', error);
+ throw Error('Could not infer chat topic: {error}');
}
}
-
-export function createMessage(senderId, username, content) {
- const currentDate = new Date();
- const dateString = currentDate.toLocaleDateString("en-US", {
- day: "numeric",
- month: "long",
- });
- const timeString = currentDate.toLocaleTimeString("en-US", {
- hour: "2-digit",
- minute: "2-digit",
- hour12: true,
- });
- return {
- _id: uuidv4(),
- senderId: senderId,
- username: username,
- content: content,
- date: dateString,
- timestamp: timeString,
- unfinished: false,
- in_error: false,
- };
-}
-
-export async function getChatName(prompt, model) {
- const summary_prompt =
- chat_openers_prompt + `### Input: """${prompt}"""\n` + "### Summary:\n";
-
- const result = (
- await complete(summary_prompt, model, ["\n", "<|endoftext|>"], false)
- )
- .replace("#", "")
- .trim();
-
- return result;
-}
diff --git a/src/utils/idb.js b/src/utils/idb.js
new file mode 100644
index 0000000..a7c41c7
--- /dev/null
+++ b/src/utils/idb.js
@@ -0,0 +1,89 @@
+import localforage from 'localforage';
+
+/**
+ * Create a new localforage instance with the given name
+ * @param {string} name - The name of the store
+ * @returns {LocalForage}
+ */
+export function createStore(name) {
+ return localforage.createInstance({ name });
+}
+
+/**
+ * Put an item in the store
+ * @param {string} id - The id of the item
+ * @param {any} item - The item to store
+ * @param {LocalForage} store - The store to put the item in
+ * @returns {Promise} - The item stored
+ */
+export async function put(id, item, store) {
+ return store.setItem(id, item);
+}
+
+/**
+ * Get an item from the store
+ * @param {string} id - The id of the item
+ * @param {LocalForage} store - The store to get the item from
+ * @returns {Promise} - The item or null if it doesn't exist
+ */
+export async function get(id, store) {
+ const item = await store.getItem(id);
+ // Check if item is null
+ if (item === null) {
+ return null;
+ }
+ return item;
+}
+
+/**
+ * Check if an item exists in the store
+ * @param {string} id - The id of the item
+ * @param {LocalForage} store - The store to check
+ * @returns {Promise} - True if the item exists, false otherwise
+ */
+export async function exists(id, store) {
+ const key = await store.getItem(id);
+ return key !== null;
+}
+
+/**
+ * Remove an item from the store
+ * @param {string} id - The id of the item to remove
+ * @param {LocalForage} store - The store to remove the item from
+ * @returns {Promise}
+ */
+export async function rm(id, store) {
+ return store.removeItem(id);
+}
+
+/**
+ * Drop the store
+ * @param {LocalForage} store - The store to drop
+ * @returns {Promise}
+ */
+export async function dropStore(store) {
+ return store.dropInstance();
+}
+
+/**
+ * Clear all items in the store
+ * @param {LocalForage} store - The store to clear
+ * @returns {Promise}
+ */
+export async function clear(store = null) {
+ if (store) {
+ return dropStore(store);
+ } else {
+ return localforage.clear();
+ }
+}
+
+export default {
+ createStore,
+ get,
+ put,
+ exists,
+ rm,
+ dropStore,
+ clear,
+};
diff --git a/src/utils/knowledge.js b/src/utils/knowledge.js
index c82edb5..b022fe1 100644
--- a/src/utils/knowledge.js
+++ b/src/utils/knowledge.js
@@ -1,25 +1,33 @@
-const knowledgeDB = {
- "aleph|aleph.im|$aleph":
- "aleph.im: decentralized, serverless computing & cloud. $ALEPH. Enables data storage, compute & DApps in dcentralzd manner. Ensures data privacy & resilience. Supports cross-chain interactions.",
- "nft|nfts":
- "NFTs: Non-fungible tokens. Unique, indivisible & verifiable digital assets. Built on blockchain tech, used for art, collectibles, virtual goods, in-game items, etc. Ownership & provenance tracking. Tradable on NFT marketplaces.",
-};
-
-export function findMatches(text) {
- const wordsRegex = /[\w.$]+/g;
- const terms = text.toLowerCase().match(wordsRegex) || [];
- const matches = new Set();
-
- for (const term of terms) {
- for (const key in knowledgeDB) {
- if (Object.prototype.hasOwnProperty.call(knowledgeDB, key)) {
- const keyTerms = key.toLowerCase().split("|");
- if (keyTerms.includes(term)) {
- matches.add(knowledgeDB[key]);
- }
- }
- }
- }
-
- return Array.from(matches);
-}
+// Sample knowledge data about Aleph and Libertai (AI generated hehe)
+export const defaultKnowledge = [
+ {
+ title: 'Aleph.Im Background',
+ content:
+ 'Aleph.im is a decentralized cloud computing platform that aims ' +
+ 'to provide an alternative to traditional centralized cloud providers ' +
+ 'like Amazon Web Services (AWS) or Google Cloud. By leveraging blockchain ' +
+ 'technology and decentralized infrastructure, Aleph.im offers a range of ' +
+ 'benefits that set it apart from its centralized counterparts. Aleph.im offers ' +
+ 'improved scalability and fault tolerance. The decentralized nature of the platform means ' +
+ 'that it can handle increased demand and traffic without experiencing the same bottlenecks ' +
+ 'and performance issues that can plague centralized cloud providers. If one node in the network ' +
+ 'goes down, the system can automatically route requests to other available nodes, ensuring ' +
+      'continuous uptime and availability. Another key feature of Aleph.im is its ' +
+ 'support for AI and machine learning workloads. The platform provides a range of tools ' +
+ 'and services that enable developers to deploy and run AI models in a decentralized manner. ' +
+ 'This includes the ability to train and deploy large language models, which are a critical ' +
+ 'component of many modern AI applications.',
+ tags: ['Aleph.Im', 'Blockchain', 'Decentralized Cloud Computing', 'DePin', 'Aleph', 'web3'],
+ },
+ {
+ title: 'Libertai.io Background',
+ content:
+ "Libertai.io is a decentralized AI platform that is built on top of Aleph.im's cloud computing platform. " +
+ 'Unlike traditional centralized AI platforms like OpenAI, Libertai.io is designed to operate in a decentralized manner, ' +
+ "leveraging the security and scalability of Aleph.im's infrastructure to provide both privacy and performance. " +
+      "Since the AI models that power the assistant are deployed and run on Aleph.im's network, rather than on a centralized server, " +
+      'users can be confident that their data and interactions are not being monitored or exploited by a single entity. This is particularly ' +
+      'important for individuals and organizations that are concerned about the privacy implications of using traditional AI assistants. ',
+ tags: ['Libertai', 'AI', 'Decentralized AI', 'Aleph.Im', 'Privacy', 'Security', 'depin'],
+ },
+];
diff --git a/src/utils/models.js b/src/utils/models.js
index 5f9e8d3..bfbcd12 100644
--- a/src/utils/models.js
+++ b/src/utils/models.js
@@ -1,125 +1,82 @@
-const defaults = {
- maxLength: 15,
- maxTries: 60,
- maxTokens: 8192,
- temperature: 0.7,
- sampler_order: [6, 0, 1, 3, 4, 2, 5],
- min_p: 0.05,
- top_p: 0.9,
- top_k: 40,
- model_type: "knowledge",
- log_start: "",
- base_prompt: "Discussion is between {{user}} and {{char}}.\n",
- persona_start: "",
- scenario_start: "",
- user_prepend: "### ",
- user_append: ": ",
- stop_sequences: ["###"],
- line_separator: "",
- engine: "kobold",
- pass_credentials: true,
+export const promptFormatDefaults = {
+ userPrepend: '<|im_start|>',
+ userAppend: '\n',
+ lineSeparator: '\n',
+ stopSequence: '<|im_end|>',
+ additionalStopSequences: ['<|endoftext|>', '<|', '|', ''],
};
-const chatml = {
- base_prompt:
- "<|im_start|>system\nYou are {{char}}, discussing with {{user}}.\n",
- log_start: "",
- user_prepend: "<|im_start|>",
- user_append: "\n",
- line_separator: "<|im_end|>\n",
- stop_sequences: ["<|im_end|>", "<|endoftext|>", "<|"],
+export const modelDefaults = {
+ maxTokens: 8192,
+ maxPredict: 15,
+ maxTries: 60,
+ temperature: 0.7,
+ minP: 0.05,
+ topP: 0.9,
+ topK: 40,
+ promptFormat: promptFormatDefaults,
};
-export default [
+/*
+ * Default Models Configuration
+ */
+export const defaultModels = [
+ // AlphaMonarch
{
- ...defaults,
- ...chatml,
- maxLength: 15,
+ name: 'AlphaMonarch (7B, fast)',
+ ...modelDefaults,
+ // Set our apiUrl
+ apiUrl:
+ 'https://curated.aleph.cloud/vm/a8b6d895cfe757d4bc5db9ba30675b5031fe3189a99a14f13d5210c473220caf/completion',
+ // Allow a larger prompt length
maxTokens: 16384,
- min_p: 0.1,
- top_p: 0.95,
+ // Set a minimum probability
+ minP: 0.1,
+ // Set a slightly higher top probability
+ topP: 0.95,
+ // Set a slightly higher temperature
temperature: 0.8,
- name: "AlphaMonarch (7B, fast)",
- apiUrl:
- "https://curated.aleph.cloud/vm/a8b6d895cfe757d4bc5db9ba30675b5031fe3189a99a14f13d5210c473220caf/completion",
- engine: "llamacpp",
- pass_credentials: true,
- stop_sequences: [
- "<|im_end|>",
- "<|endoftext|>",
- "<|",
- "|",
- "",
- "",
- "<|endoftext|>",
- "<|",
- "",
- "","<|endoftext|>", "<|", "","",
- "<|endoftext|>",
- "<|",
- "",
- "