diff --git a/.env.example b/.env.example index 126bd5f20..d5cfc5cb3 100644 --- a/.env.example +++ b/.env.example @@ -35,5 +35,10 @@ OPENAI_LIKE_API_BASE_URL= # Get your OpenAI Like API Key OPENAI_LIKE_API_KEY= +# Get your Mistral API Key by following these instructions - +# https://console.mistral.ai/api-keys/ +# You only need this environment variable set if you want to use Mistral models +MISTRAL_API_KEY= + # Include this environment variable if you want more logging for debugging locally VITE_LOG_LEVEL=debug diff --git a/.github/workflows/github-build-push.yml b/.github/workflows/github-build-push.yml new file mode 100644 index 000000000..4d4db05d8 --- /dev/null +++ b/.github/workflows/github-build-push.yml @@ -0,0 +1,39 @@ +name: Build and Push Container + +on: + push: + branches: + - main + # paths: + # - 'Dockerfile' + workflow_dispatch: +jobs: + build-and-push: + runs-on: [ubuntu-latest] + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and Push Containers + uses: docker/build-push-action@v2 + with: + context: . + file: Dockerfile + platforms: linux/amd64,linux/arm64 + push: true + tags: | + ghcr.io/${{ github.repository }}:latest + ghcr.io/${{ github.repository }}:${{ github.sha }} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ef4141cd8..ad3b1951a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,110 +1,98 @@ -[![Bolt Open Source Codebase](./public/social_preview_index.jpg)](https://bolt.new) +# Contributing to Bolt.new Fork -> Welcome to the **Bolt** open-source codebase! 
This repo contains a simple example app using the core components from bolt.new to help you get started building **AI-powered software development tools** powered by StackBlitz’s **WebContainer API**. +First off, thank you for considering contributing to Bolt.new! This fork aims to expand the capabilities of the original project by integrating multiple LLM providers and enhancing functionality. Every contribution helps make Bolt.new a better tool for developers worldwide. -### Why Build with Bolt + WebContainer API +## 📋 Table of Contents +- [Code of Conduct](#code-of-conduct) +- [How Can I Contribute?](#how-can-i-contribute) +- [Pull Request Guidelines](#pull-request-guidelines) +- [Coding Standards](#coding-standards) +- [Development Setup](#development-setup) +- [Project Structure](#project-structure) -By building with the Bolt + WebContainer API you can create browser-based applications that let users **prompt, run, edit, and deploy** full-stack web apps directly in the browser, without the need for virtual machines. With WebContainer API, you can build apps that give AI direct access and full control over a **Node.js server**, **filesystem**, **package manager** and **dev terminal** inside your users browser tab. This powerful combination allows you to create a new class of development tools that support all major JavaScript libraries and Node packages right out of the box, all without remote environments or local installs. +## Code of Conduct -### What’s the Difference Between Bolt (This Repo) and [Bolt.new](https://bolt.new)? +This project and everyone participating in it is governed by our Code of Conduct. By participating, you are expected to uphold this code. Please report unacceptable behavior to the project maintainers. -- **Bolt.new**: This is the **commercial product** from StackBlitz—a hosted, browser-based AI development tool that enables users to prompt, run, edit, and deploy full-stack web applications directly in the browser. 
Built on top of the [Bolt open-source repo](https://github.com/stackblitz/bolt.new) and powered by the StackBlitz **WebContainer API**. +## How Can I Contribute? -- **Bolt (This Repo)**: This open-source repository provides the core components used to make **Bolt.new**. This repo contains the UI interface for Bolt as well as the server components, built using [Remix Run](https://remix.run/). By leveraging this repo and StackBlitz’s **WebContainer API**, you can create your own AI-powered development tools and full-stack applications that run entirely in the browser. +### 🐞 Reporting Bugs and Feature Requests +- Check the issue tracker to avoid duplicates +- Use the issue templates when available +- Include as much relevant information as possible +- For bugs, add steps to reproduce the issue -# Get Started Building with Bolt +### 🔧 Code Contributions +1. Fork the repository +2. Create a new branch for your feature/fix +3. Write your code +4. Submit a pull request -Bolt combines the capabilities of AI with sandboxed development environments to create a collaborative experience where code can be developed by the assistant and the programmer together. Bolt combines [WebContainer API](https://webcontainers.io/api) with [Claude Sonnet 3.5](https://www.anthropic.com/news/claude-3-5-sonnet) using [Remix](https://remix.run/) and the [AI SDK](https://sdk.vercel.ai/). +### ✨ Becoming a Core Contributor +We're looking for dedicated contributors to help maintain and grow this project. If you're interested in becoming a core contributor, please fill out our [Contributor Application Form](https://forms.gle/TBSteXSDCtBDwr5m7). -### WebContainer API +## Pull Request Guidelines -Bolt uses [WebContainers](https://webcontainers.io/) to run generated code in the browser. WebContainers provide Bolt with a full-stack sandbox environment using [WebContainer API](https://webcontainers.io/api). 
WebContainers run full-stack applications directly in the browser without the cost and security concerns of cloud hosted AI agents. WebContainers are interactive and editable, and enables Bolt's AI to run code and understand any changes from the user. +### 📝 PR Checklist +- [ ] Branch from the main branch +- [ ] Update documentation if needed +- [ ] Manually verify all new functionality works as expected +- [ ] Keep PRs focused and atomic -The [WebContainer API](https://webcontainers.io) is free for personal and open source usage. If you're building an application for commercial usage, you can learn more about our [WebContainer API commercial usage pricing here](https://stackblitz.com/pricing#webcontainer-api). +### 👀 Review Process +1. Manually test the changes +2. At least one maintainer review required +3. Address all review comments +4. Maintain clean commit history -### Remix App +## Coding Standards -Bolt is built with [Remix](https://remix.run/) and -deployed using [CloudFlare Pages](https://pages.cloudflare.com/) and -[CloudFlare Workers](https://workers.cloudflare.com/). +### 💻 General Guidelines +- Follow existing code style +- Comment complex logic +- Keep functions focused and small +- Use meaningful variable names -### AI SDK Integration - -Bolt uses the [AI SDK](https://github.com/vercel/ai) to integrate with AI -models. At this time, Bolt supports using Anthropic's Claude Sonnet 3.5. -You can get an API key from the [Anthropic API Console](https://console.anthropic.com/) to use with Bolt. -Take a look at how [Bolt uses the AI SDK](https://github.com/stackblitz/bolt.new/tree/main/app/lib/.server/llm) - -## Prerequisites - -Before you begin, ensure you have the following installed: - -- Node.js (v20.15.1) -- pnpm (v9.4.0) - -## Setup - -1. Clone the repository (if you haven't already): +## Development Setup +### 🔄 Initial Setup +1. 
Clone the repository: ```bash -git clone https://github.com/stackblitz/bolt.new.git +git clone https://github.com/coleam00/bolt.new-any-llm.git ``` 2. Install dependencies: - ```bash pnpm install ``` -3. Create a `.env.local` file in the root directory and add your Anthropic API key: - -``` +3. Set up environment variables: + - Rename `.env.example` to `.env.local` + - Add your LLM API keys (only set the ones you plan to use): +```bash +GROQ_API_KEY=XXX +OPENAI_API_KEY=XXX ANTHROPIC_API_KEY=XXX +... ``` - -Optionally, you can set the debug level: - -``` + - Optionally set debug level: +```bash VITE_LOG_LEVEL=debug ``` - **Important**: Never commit your `.env.local` file to version control. It's already included in .gitignore. -## Available Scripts - -- `pnpm run dev`: Starts the development server. -- `pnpm run build`: Builds the project. -- `pnpm run start`: Runs the built application locally using Wrangler Pages. This script uses `bindings.sh` to set up necessary bindings so you don't have to duplicate environment variables. -- `pnpm run preview`: Builds the project and then starts it locally, useful for testing the production build. Note, HTTP streaming currently doesn't work as expected with `wrangler pages dev`. -- `pnpm test`: Runs the test suite using Vitest. -- `pnpm run typecheck`: Runs TypeScript type checking. -- `pnpm run typegen`: Generates TypeScript types using Wrangler. -- `pnpm run deploy`: Builds the project and deploys it to Cloudflare Pages. - -## Development - -To start the development server: - +### 🚀 Running the Development Server ```bash pnpm run dev ``` -This will start the Remix Vite development server. - -## Testing +**Note**: You will need Google Chrome Canary to run this locally if you use Chrome! It's an easy install and a good browser for web development anyway. -Run the test suite with: +## Questions? 
-```bash -pnpm test -``` - -## Deployment - -To deploy the application to Cloudflare Pages: - -```bash -pnpm run deploy -``` +For any questions about contributing, please: +1. Check existing documentation +2. Search through issues +3. Create a new issue with the question label -Make sure you have the necessary permissions and Wrangler is correctly configured for your Cloudflare account. +Thank you for contributing to Bolt.new! 🚀 \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..de88d11e9 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,29 @@ +# Use an official Node.js runtime as the base image +FROM node:20.15.1 + +# Set the working directory in the container +WORKDIR /app + +# Install pnpm +RUN npm install -g pnpm@9.4.0 + +# Copy package.json and pnpm-lock.yaml (if available) +COPY package.json pnpm-lock.yaml* ./ + +# Install dependencies +RUN pnpm install + +# Copy the rest of the application code +COPY . . + +# Build the application +RUN pnpm run build + +# Make sure bindings.sh is executable +RUN chmod +x bindings.sh + +# Expose the port the app runs on (adjust if you specified a different port) +EXPOSE 3000 + +# Start the application +CMD ["pnpm", "run", "start"] \ No newline at end of file diff --git a/README.md b/README.md index 50d8f6bd2..116963f51 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,7 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt! - ✅ Autogenerate Ollama models from what is downloaded (@yunatamos) - ✅ Filter models by provider (@jasonm23) - ✅ Download project as ZIP (@fabwaseem) +- ✅ Improvements to the main Bolt.new prompt in `app\lib\.server\llm\prompts.ts` (@kofi-bhr) - ⬜ LM Studio Integration - ⬜ DeepSeek API Integration - ⬜ Together Integration @@ -28,6 +29,7 @@ This fork of Bolt.new allows you to choose the LLM that you use for each prompt! 
- ⬜ Prompt caching - ⬜ Ability to enter API keys in the UI - ⬜ Prevent Bolt from rewriting files as often +- ⬜ Have LLM plan the project in a MD file for better results/transparency # Bolt.new: AI-Powered Full-Stack Web Development in the Browser @@ -114,7 +116,7 @@ To start the development server: pnpm run dev ``` -This will start the Remix Vite development server. You will need Google Chrome Canary to run this locally! It's an easy install and a good browser for web development anyway. +This will start the Remix Vite development server. You will need Google Chrome Canary to run this locally if you use Chrome! It's an easy install and a good browser for web development anyway. ## Tips and Tricks diff --git a/app/components/workbench/Workbench.client.tsx b/app/components/workbench/Workbench.client.tsx index 839e9a807..29c722c89 100644 --- a/app/components/workbench/Workbench.client.tsx +++ b/app/components/workbench/Workbench.client.tsx @@ -1,7 +1,7 @@ import { useStore } from '@nanostores/react'; import { motion, type HTMLMotionProps, type Variants } from 'framer-motion'; import { computed } from 'nanostores'; -import { memo, useCallback, useEffect } from 'react'; +import { memo, useCallback, useEffect, useState } from 'react'; import { toast } from 'react-toastify'; import { type OnChangeCallback as OnEditorChange, @@ -55,6 +55,8 @@ const workbenchVariants = { export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) => { renderLogger.trace('Workbench'); + const [isSyncing, setIsSyncing] = useState(false); + const hasPreview = useStore(computed(workbenchStore.previews, (previews) => previews.length > 0)); const showWorkbench = useStore(workbenchStore.showWorkbench); const selectedFile = useStore(workbenchStore.selectedFile); @@ -99,6 +101,21 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) => workbenchStore.resetCurrentDocument(); }, []); + const handleSyncFiles = useCallback(async () => { + setIsSyncing(true); + 
+ try { + const directoryHandle = await window.showDirectoryPicker(); + await workbenchStore.syncFiles(directoryHandle); + toast.success('Files synced successfully'); + } catch (error) { + console.error('Error syncing files:', error); + toast.error('Failed to sync files'); + } finally { + setIsSyncing(false); + } + }, []); + return ( chatStarted && (
Download Code + + {isSyncing ?
:
} + {isSyncing ? 'Syncing...' : 'Sync Files'} + { @@ -141,6 +162,31 @@ export const Workbench = memo(({ chatStarted, isStreaming }: WorkspaceProps) =>
Toggle Terminal + { + const repoName = prompt("Please enter a name for your new GitHub repository:", "bolt-generated-project"); + if (!repoName) { + alert("Repository name is required. Push to GitHub cancelled."); + return; + } + const githubUsername = prompt("Please enter your GitHub username:"); + if (!githubUsername) { + alert("GitHub username is required. Push to GitHub cancelled."); + return; + } + const githubToken = prompt("Please enter your GitHub personal access token:"); + if (!githubToken) { + alert("GitHub token is required. Push to GitHub cancelled."); + return; + } + + workbenchStore.pushToGitHub(repoName, githubUsername, githubToken); + }} + > +
+ Push to GitHub + )} ) ); }); - interface ViewProps extends HTMLMotionProps<'div'> { children: JSX.Element; } diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts index fedb0bc5f..a1fd3a022 100644 --- a/app/lib/.server/llm/api-key.ts +++ b/app/lib/.server/llm/api-key.ts @@ -19,8 +19,12 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) { return env.GROQ_API_KEY || cloudflareEnv.GROQ_API_KEY; case 'OpenRouter': return env.OPEN_ROUTER_API_KEY || cloudflareEnv.OPEN_ROUTER_API_KEY; - case "OpenAILike": - return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY; + case 'Deepseek': + return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY + case 'Mistral': + return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY; + case "OpenAILike": + return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY; default: return ""; } diff --git a/app/lib/.server/llm/constants.ts b/app/lib/.server/llm/constants.ts index b24acdf27..7b3a0f245 100644 --- a/app/lib/.server/llm/constants.ts +++ b/app/lib/.server/llm/constants.ts @@ -1,5 +1,5 @@ // see https://docs.anthropic.com/en/docs/about-claude/models -export const MAX_TOKENS = 8192; +export const MAX_TOKENS = 8000; // limits the number of model responses that can be returned in a single request export const MAX_RESPONSE_SEGMENTS = 2; diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts index 17881980d..9c792893a 100644 --- a/app/lib/.server/llm/model.ts +++ b/app/lib/.server/llm/model.ts @@ -6,6 +6,8 @@ import { createOpenAI } from '@ai-sdk/openai'; import { createGoogleGenerativeAI } from '@ai-sdk/google'; import { ollama } from 'ollama-ai-provider'; import { createOpenRouter } from "@openrouter/ai-sdk-provider"; +import { mistral } from '@ai-sdk/mistral'; +import { createMistral } from '@ai-sdk/mistral'; export function getAnthropicModel(apiKey: string, model: string) { const anthropic = createAnthropic({ @@ -30,6 +32,14 @@ export function 
getOpenAIModel(apiKey: string, model: string) { return openai(model); } +export function getMistralModel(apiKey: string, model: string) { + const mistral = createMistral({ + apiKey + }); + + return mistral(model); +} + export function getGoogleModel(apiKey: string, model: string) { const google = createGoogleGenerativeAI( apiKey, @@ -47,6 +57,14 @@ export function getGroqModel(apiKey: string, model: string) { return openai(model); } +export function getDeepseekModel(apiKey: string, model: string){ + const openai = createOpenAI({ + baseURL: 'https://api.deepseek.com/beta', + apiKey, + }); + + return openai(model); +} export function getOllamaModel(model: string) { return ollama(model); } @@ -76,6 +94,10 @@ export function getModel(provider: string, model: string, env: Env) { return getGoogleModel(apiKey, model) case 'OpenAILike': return getOpenAILikeModel(baseURL,apiKey, model); + case 'Deepseek': + return getDeepseekModel(apiKey, model) + case 'Mistral': + return getMistralModel(apiKey, model); default: return getOllamaModel(model); } diff --git a/app/lib/.server/llm/prompts.ts b/app/lib/.server/llm/prompts.ts index f78b41873..4749553e6 100644 --- a/app/lib/.server/llm/prompts.ts +++ b/app/lib/.server/llm/prompts.ts @@ -29,7 +29,32 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w IMPORTANT: When choosing databases or npm packages, prefer options that don't rely on native binaries. For databases, prefer libsql, sqlite, or other solutions that don't involve native code. WebContainer CANNOT execute arbitrary native binaries. 
- Available shell commands: cat, chmod, cp, echo, hostname, kill, ln, ls, mkdir, mv, ps, pwd, rm, rmdir, xxd, alias, cd, clear, curl, env, false, getconf, head, sort, tail, touch, true, uptime, which, code, jq, loadenv, node, python3, wasm, xdg-open, command, exit, export, source + Available shell commands: + File Operations: + - cat: Display file contents + - cp: Copy files/directories + - ls: List directory contents + - mkdir: Create directory + - mv: Move/rename files + - rm: Remove files + - rmdir: Remove empty directories + - touch: Create empty file/update timestamp + + System Information: + - hostname: Show system name + - ps: Display running processes + - pwd: Print working directory + - uptime: Show system uptime + - env: Environment variables + + Development Tools: + - node: Execute Node.js code + - python3: Run Python scripts + - code: VSCode operations + - jq: Process JSON + + Other Utilities: + - curl, head, sort, tail, clear, which, export, chmod, echo, hostname, kill, ln, xxd, alias, cd, false, getconf, true, loadenv, wasm, xdg-open, command, exit, source @@ -84,6 +109,36 @@ You are Bolt, an expert AI assistant and exceptional senior software developer w + + Before providing a solution, BRIEFLY outline your implementation steps. This helps ensure systematic thinking and clear communication. Your planning should: + - List concrete steps you'll take + - Identify key components needed + - Note potential challenges + - Be concise (2-4 lines maximum) + + Example responses: + + User: "Create a todo list app with local storage" + Assistant: "Sure. I'll start by: + 1. Set up Vite + React + 2. Create TodoList and TodoItem components + 3. Implement localStorage for persistence + 4. Add CRUD operations + + Let's start now. + + [Rest of response...]" + + User: "Help debug why my API calls aren't working" + Assistant: "Great. My first steps will be: + 1. Check network requests + 2. Verify API endpoint format + 3. 
Examine error handling + + [Rest of response...]" + + + Bolt creates a SINGLE, comprehensive artifact for each project. The artifact contains all necessary steps and components, including: diff --git a/app/lib/stores/workbench.ts b/app/lib/stores/workbench.ts index 4b85bc2e6..c42cc6275 100644 --- a/app/lib/stores/workbench.ts +++ b/app/lib/stores/workbench.ts @@ -11,6 +11,7 @@ import { PreviewsStore } from './previews'; import { TerminalStore } from './terminal'; import JSZip from 'jszip'; import { saveAs } from 'file-saver'; +import { Octokit } from "@octokit/rest"; export interface ArtifactState { id: string; @@ -280,21 +281,22 @@ export class WorkbenchStore { for (const [filePath, dirent] of Object.entries(files)) { if (dirent?.type === 'file' && !dirent.isBinary) { - // Remove '/home/project/' from the beginning of the path + // remove '/home/project/' from the beginning of the path const relativePath = filePath.replace(/^\/home\/project\//, ''); - // Split the path into segments + // split the path into segments const pathSegments = relativePath.split('/'); - // If there's more than one segment, we need to create folders + // if there's more than one segment, we need to create folders if (pathSegments.length > 1) { let currentFolder = zip; + for (let i = 0; i < pathSegments.length - 1; i++) { currentFolder = currentFolder.folder(pathSegments[i])!; } currentFolder.file(pathSegments[pathSegments.length - 1], dirent.content); } else { - // If there's only one segment, it's a file in the root + // if there's only one segment, it's a file in the root zip.file(relativePath, dirent.content); } } @@ -303,6 +305,140 @@ export class WorkbenchStore { const content = await zip.generateAsync({ type: 'blob' }); saveAs(content, 'project.zip'); } + + async syncFiles(targetHandle: FileSystemDirectoryHandle) { + const files = this.files.get(); + const syncedFiles = []; + + for (const [filePath, dirent] of Object.entries(files)) { + if (dirent?.type === 'file' && 
!dirent.isBinary) { + const relativePath = filePath.replace(/^\/home\/project\//, ''); + const pathSegments = relativePath.split('/'); + let currentHandle = targetHandle; + + for (let i = 0; i < pathSegments.length - 1; i++) { + currentHandle = await currentHandle.getDirectoryHandle(pathSegments[i], { create: true }); + } + + // create or get the file + const fileHandle = await currentHandle.getFileHandle(pathSegments[pathSegments.length - 1], { create: true }); + + // write the file content + const writable = await fileHandle.createWritable(); + await writable.write(dirent.content); + await writable.close(); + + syncedFiles.push(relativePath); + } + } + + return syncedFiles; + } + + async pushToGitHub(repoName: string, githubUsername: string, ghToken: string) { + + try { + // Get the GitHub auth token from environment variables + const githubToken = ghToken; + + const owner = githubUsername; + + if (!githubToken) { + throw new Error('GitHub token is not set in environment variables'); + } + + // Initialize Octokit with the auth token + const octokit = new Octokit({ auth: githubToken }); + + // Check if the repository already exists before creating it + let repo + try { + repo = await octokit.repos.get({ owner: owner, repo: repoName }); + } catch (error) { + if (error instanceof Error && 'status' in error && error.status === 404) { + // Repository doesn't exist, so create a new one + const { data: newRepo } = await octokit.repos.createForAuthenticatedUser({ + name: repoName, + private: false, + auto_init: true, + }); + repo = newRepo; + } else { + console.log('cannot create repo!'); + throw error; // Some other error occurred + } + } + + // Get all files + const files = this.files.get(); + if (!files || Object.keys(files).length === 0) { + throw new Error('No files found to push'); + } + + // Create blobs for each file + const blobs = await Promise.all( + Object.entries(files).map(async ([filePath, dirent]) => { + if (dirent?.type === 'file' && dirent.content) { + 
const { data: blob } = await octokit.git.createBlob({ + owner: repo.owner.login, + repo: repo.name, + content: Buffer.from(dirent.content).toString('base64'), + encoding: 'base64', + }); + return { path: filePath.replace(/^\/home\/project\//, ''), sha: blob.sha }; + } + }) + ); + + const validBlobs = blobs.filter(Boolean); // Filter out any undefined blobs + + if (validBlobs.length === 0) { + throw new Error('No valid files to push'); + } + + // Get the latest commit SHA (assuming main branch, update dynamically if needed) + const { data: ref } = await octokit.git.getRef({ + owner: repo.owner.login, + repo: repo.name, + ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch + }); + const latestCommitSha = ref.object.sha; + + // Create a new tree + const { data: newTree } = await octokit.git.createTree({ + owner: repo.owner.login, + repo: repo.name, + base_tree: latestCommitSha, + tree: validBlobs.map((blob) => ({ + path: blob!.path, + mode: '100644', + type: 'blob', + sha: blob!.sha, + })), + }); + + // Create a new commit + const { data: newCommit } = await octokit.git.createCommit({ + owner: repo.owner.login, + repo: repo.name, + message: 'Initial commit from your app', + tree: newTree.sha, + parents: [latestCommitSha], + }); + + // Update the reference + await octokit.git.updateRef({ + owner: repo.owner.login, + repo: repo.name, + ref: `heads/${repo.default_branch || 'main'}`, // Handle dynamic branch + sha: newCommit.sha, + }); + + alert(`Repository created and code pushed: ${repo.html_url}`); + } catch (error) { + console.error('Error pushing to GitHub:', error instanceof Error ? 
error.message : String(error)); + } + } } export const workbenchStore = new WorkbenchStore(); diff --git a/app/types/global.d.ts b/app/types/global.d.ts new file mode 100644 index 000000000..a1f6789de --- /dev/null +++ b/app/types/global.d.ts @@ -0,0 +1,3 @@ +interface Window { + showDirectoryPicker(): Promise; +} diff --git a/app/utils/constants.ts b/app/utils/constants.ts index 0a806741d..cb638ba32 100644 --- a/app/utils/constants.ts +++ b/app/utils/constants.ts @@ -1,4 +1,4 @@ -import type { ModelInfo } from './types'; +import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types'; export const WORK_DIR_NAME = 'project'; export const WORK_DIR = `/home/${WORK_DIR_NAME}`; @@ -10,6 +10,8 @@ export const DEFAULT_PROVIDER = 'Anthropic'; const staticModels: ModelInfo[] = [ { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' }, { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' }, + { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' }, + { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' }, { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' }, { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' }, { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' }, @@ -30,18 +32,29 @@ const staticModels: ModelInfo[] = [ { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' }, { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' }, { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' }, + { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek'}, + { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek'}, + { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' }, + { name: 'open-mixtral-8x7b', 
label: 'Mistral 8x7B', provider: 'Mistral' }, + { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' }, + { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' }, + { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' }, + { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' }, + { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' }, + { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' }, + { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' }, ]; export let MODEL_LIST: ModelInfo[] = [...staticModels]; async function getOllamaModels(): Promise { try { - const base_url =import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434"; + const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434"; const url = new URL(base_url).toString(); const response = await fetch(`${url}/api/tags`); - const data = await response.json(); + const data = await response.json() as OllamaApiResponse; - return data.models.map((model: any) => ({ + return data.models.map((model: OllamaModel) => ({ name: model.name, label: `${model.name} (${model.details.parameter_size})`, provider: 'Ollama', diff --git a/app/utils/types.ts b/app/utils/types.ts index 7ace4e64d..5fcd2de25 100644 --- a/app/utils/types.ts +++ b/app/utils/types.ts @@ -8,7 +8,7 @@ interface OllamaModelDetails { quantization_level: string; } -interface OllamaModel { +export interface OllamaModel { name: string; model: string; modified_at: string; diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..21ef4c097 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,24 @@ +services: + bolt-app: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "3000:3000" + environment: + - NODE_ENV=production + # Add any other environment variables your app needs + # - OPENAI_API_KEY=${OPENAI_API_KEY} + # - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY} + # - GROQ_API_KEY=${GROQ_API_KEY} + # - OPEN_ROUTER_API_KEY=${OPEN_ROUTER_API_KEY} + volumes: + # This volume is for development purposes, allowing live code updates + # Comment out or remove for production + - .:/app + # This volume is to prevent node_modules from being overwritten by the above volume + - /app/node_modules + command: pnpm run start + +volumes: + node_modules: \ No newline at end of file diff --git a/package.json b/package.json index 737ca05cb..edb2b8dad 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,6 @@ "description": "StackBlitz AI Agent", "private": true, "license": "MIT", - "packageManager": "pnpm@9.4.0", "sideEffects": false, "type": "module", "scripts": { @@ -14,7 +13,7 @@ "test:watch": "vitest", "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint .", "lint:fix": "npm run lint -- --fix", - "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings", + "start": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 3000", "typecheck": "tsc", "typegen": "wrangler types", "preview": "pnpm run build && pnpm run start" @@ -26,6 +25,7 @@ "@ai-sdk/anthropic": "^0.0.39", "@ai-sdk/google": "^0.0.52", "@ai-sdk/openai": "^0.0.66", + "@ai-sdk/mistral": "^0.0.43", "@codemirror/autocomplete": "^6.17.0", "@codemirror/commands": "^6.6.0", "@codemirror/lang-cpp": "^6.0.2", @@ -45,6 +45,8 @@ "@iconify-json/svg-spinners": "^1.1.2", "@lezer/highlight": "^1.2.0", "@nanostores/react": "^0.7.2", + "@octokit/rest": "^21.0.2", + "@octokit/types": "^13.6.1", "@openrouter/ai-sdk-provider": "^0.0.5", "@radix-ui/react-dialog": "^1.1.1", "@radix-ui/react-dropdown-menu": "^2.1.1", @@ -94,6 +96,7 @@ "is-ci": "^3.0.1", "node-fetch": "^3.3.2", "prettier": 
"^3.3.2", + "sass-embedded": "^1.80.3", "typescript": "^5.5.2", "unified": "^11.0.5", "unocss": "^0.61.3", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 46dc9dbfc..f3a842460 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -17,6 +17,9 @@ importers: '@ai-sdk/google': specifier: ^0.0.52 version: 0.0.52(zod@3.23.8) + '@ai-sdk/mistral': + specifier: ^0.0.43 + version: 0.0.43(zod@3.23.8) '@ai-sdk/openai': specifier: ^0.0.66 version: 0.0.66(zod@3.23.8) @@ -77,6 +80,12 @@ importers: '@nanostores/react': specifier: ^0.7.2 version: 0.7.2(nanostores@0.10.3)(react@18.3.1) + '@octokit/rest': + specifier: ^21.0.2 + version: 21.0.2 + '@octokit/types': + specifier: ^13.6.1 + version: 13.6.1 '@openrouter/ai-sdk-provider': specifier: ^0.0.5 version: 0.0.5(zod@3.23.8) @@ -264,6 +273,12 @@ packages: peerDependencies: zod: ^3.0.0 + '@ai-sdk/mistral@0.0.43': + resolution: {integrity: sha512-YcneVvO57bbmseUmnvQaj6OolMj7/q1W/oeiFj1h+CJZsXIOX8P9i2Cmo2B7HMBbt73NIcvtyPze3GjaczZRqw==} + engines: {node: '>=18'} + peerDependencies: + zod: ^3.0.0 + '@ai-sdk/openai@0.0.66': resolution: {integrity: sha512-V4XeDnlNl5/AY3GB3ozJUjqnBLU5pK3DacKTbCNH3zH8/MggJoH6B8wRGdLUPVFMcsMz60mtvh4DC9JsIVFrKw==} engines: {node: '>=18'} @@ -1230,6 +1245,58 @@ packages: resolution: {integrity: sha512-gGq0NJkIGSwdbUt4yhdF8ZrmkGKVz9vAdVzpOfnom+V8PLSmSOVhZwbNvZZS1EYcJN5hzzKBxmmVVAInM6HQLg==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + '@octokit/auth-token@5.1.1': + resolution: {integrity: sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA==} + engines: {node: '>= 18'} + + '@octokit/core@6.1.2': + resolution: {integrity: sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg==} + engines: {node: '>= 18'} + + '@octokit/endpoint@10.1.1': + resolution: {integrity: sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==} + engines: {node: '>= 18'} + + '@octokit/graphql@8.1.1': + 
resolution: {integrity: sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg==} + engines: {node: '>= 18'} + + '@octokit/openapi-types@22.2.0': + resolution: {integrity: sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==} + + '@octokit/plugin-paginate-rest@11.3.5': + resolution: {integrity: sha512-cgwIRtKrpwhLoBi0CUNuY83DPGRMaWVjqVI/bGKsLJ4PzyWZNaEmhHroI2xlrVXkk6nFv0IsZpOp+ZWSWUS2AQ==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-request-log@5.3.1': + resolution: {integrity: sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/plugin-rest-endpoint-methods@13.2.6': + resolution: {integrity: sha512-wMsdyHMjSfKjGINkdGKki06VEkgdEldIGstIEyGX0wbYHGByOwN/KiM+hAAlUwAtPkP3gvXtVQA9L3ITdV2tVw==} + engines: {node: '>= 18'} + peerDependencies: + '@octokit/core': '>=6' + + '@octokit/request-error@6.1.5': + resolution: {integrity: sha512-IlBTfGX8Yn/oFPMwSfvugfncK2EwRLjzbrpifNaMY8o/HTEAFqCA1FZxjD9cWvSKBHgrIhc4CSBIzMxiLsbzFQ==} + engines: {node: '>= 18'} + + '@octokit/request@9.1.3': + resolution: {integrity: sha512-V+TFhu5fdF3K58rs1pGUJIDH5RZLbZm5BI+MNF+6o/ssFNT4vWlCh/tVpF3NxGtP15HUxTTMUbsG5llAuU2CZA==} + engines: {node: '>= 18'} + + '@octokit/rest@21.0.2': + resolution: {integrity: sha512-+CiLisCoyWmYicH25y1cDfCrv41kRSvTq6pPWtRroRJzhsCZWZyCqGyI8foJT5LmScADSwRAnr/xo+eewL04wQ==} + engines: {node: '>= 18'} + + '@octokit/types@13.6.1': + resolution: {integrity: sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==} + '@openrouter/ai-sdk-provider@0.0.5': resolution: {integrity: sha512-AfxXQhISpxQSeUjU/4jo9waM5GRNX6eIkfTFS9l7vHkD1TKDP81Y/dXrE0ttJeN/Kap3tPF3Jwh49me0gWwjSw==} engines: {node: '>=18'} @@ -2211,6 +2278,9 @@ packages: resolution: {integrity: 
sha512-NF+epuEdnUYVlGuhaxbbq+dvJttwLnGY+YixlXlME5KpQ5W3CnXA5cVTneY3SPbPDRkcjMbifrwmFYcClgOZeg==} engines: {node: '>= 0.8'} + before-after-hook@3.0.2: + resolution: {integrity: sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==} + binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} @@ -4970,9 +5040,6 @@ packages: engines: {node: '>=14.17'} hasBin: true - ufo@1.5.3: - resolution: {integrity: sha512-Y7HYmWaFwPUmkoQCUIAYpKqkOf+SbVj/2fJJZ4RJMCfZp0rTGwRbzQD+HghfnhKOjL9E01okqz+ncJskGYfBNw==} - ufo@1.5.4: resolution: {integrity: sha512-UsUk3byDzKd04EyoZ7U4DOlxQaD14JUKQl6/P7wiX4FNvUfm3XL246n9W5AmqwW5RSFJ27NAuM0iLscAOYUiGQ==} @@ -5053,6 +5120,9 @@ packages: unist-util-visit@5.0.0: resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==} + universal-user-agent@7.0.2: + resolution: {integrity: sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==} + universalify@2.0.1: resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} engines: {node: '>= 10.0.0'} @@ -5387,6 +5457,12 @@ snapshots: json-schema: 0.4.0 zod: 3.23.8 + '@ai-sdk/mistral@0.0.43(zod@3.23.8)': + dependencies: + '@ai-sdk/provider': 0.0.24 + '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8) + zod: 3.23.8 + '@ai-sdk/openai@0.0.66(zod@3.23.8)': dependencies: '@ai-sdk/provider': 0.0.24 @@ -6363,6 +6439,67 @@ snapshots: dependencies: which: 3.0.1 + '@octokit/auth-token@5.1.1': {} + + '@octokit/core@6.1.2': + dependencies: + '@octokit/auth-token': 5.1.1 + '@octokit/graphql': 8.1.1 + '@octokit/request': 9.1.3 + '@octokit/request-error': 6.1.5 + '@octokit/types': 13.6.1 + before-after-hook: 3.0.2 + universal-user-agent: 7.0.2 + + '@octokit/endpoint@10.1.1': + dependencies: + '@octokit/types': 13.6.1 + 
universal-user-agent: 7.0.2 + + '@octokit/graphql@8.1.1': + dependencies: + '@octokit/request': 9.1.3 + '@octokit/types': 13.6.1 + universal-user-agent: 7.0.2 + + '@octokit/openapi-types@22.2.0': {} + + '@octokit/plugin-paginate-rest@11.3.5(@octokit/core@6.1.2)': + dependencies: + '@octokit/core': 6.1.2 + '@octokit/types': 13.6.1 + + '@octokit/plugin-request-log@5.3.1(@octokit/core@6.1.2)': + dependencies: + '@octokit/core': 6.1.2 + + '@octokit/plugin-rest-endpoint-methods@13.2.6(@octokit/core@6.1.2)': + dependencies: + '@octokit/core': 6.1.2 + '@octokit/types': 13.6.1 + + '@octokit/request-error@6.1.5': + dependencies: + '@octokit/types': 13.6.1 + + '@octokit/request@9.1.3': + dependencies: + '@octokit/endpoint': 10.1.1 + '@octokit/request-error': 6.1.5 + '@octokit/types': 13.6.1 + universal-user-agent: 7.0.2 + + '@octokit/rest@21.0.2': + dependencies: + '@octokit/core': 6.1.2 + '@octokit/plugin-paginate-rest': 11.3.5(@octokit/core@6.1.2) + '@octokit/plugin-request-log': 5.3.1(@octokit/core@6.1.2) + '@octokit/plugin-rest-endpoint-methods': 13.2.6(@octokit/core@6.1.2) + + '@octokit/types@13.6.1': + dependencies: + '@octokit/openapi-types': 22.2.0 + '@openrouter/ai-sdk-provider@0.0.5(zod@3.23.8)': dependencies: '@ai-sdk/provider': 0.0.12 @@ -7530,6 +7667,8 @@ snapshots: safe-buffer: 5.1.2 optional: true + before-after-hook@3.0.2: {} + binary-extensions@2.3.0: {} binaryextensions@6.11.0: @@ -9972,7 +10111,7 @@ snapshots: dependencies: destr: 2.0.3 node-fetch-native: 1.6.4 - ufo: 1.5.3 + ufo: 1.5.4 ollama-ai-provider@0.15.2(zod@3.23.8): dependencies: @@ -11002,8 +11141,6 @@ snapshots: typescript@5.5.2: {} - ufo@1.5.3: {} - ufo@1.5.4: {} unconfig@0.3.13: @@ -11122,6 +11259,8 @@ snapshots: unist-util-is: 6.0.0 unist-util-visit-parents: 6.0.1 + universal-user-agent@7.0.2: {} + universalify@2.0.1: {} unocss@0.61.3(postcss@8.4.38)(rollup@4.18.0)(vite@5.3.1(@types/node@20.14.9)(sass@1.77.6)): diff --git a/vite.config.ts b/vite.config.ts index 1afe2b117..625390702 100644 --- 
a/vite.config.ts +++ b/vite.config.ts @@ -28,6 +28,13 @@ export default defineConfig((config) => { config.mode === 'production' && optimizeCssModules({ apply: 'build' }), ], envPrefix:["VITE_","OPENAI_LIKE_API_","OLLAMA_API_BASE_URL"], + css: { + preprocessorOptions: { + scss: { + api: 'modern-compiler', + }, + }, + }, }; }); diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts index dcca11908..82961ecd6 100644 --- a/worker-configuration.d.ts +++ b/worker-configuration.d.ts @@ -6,4 +6,5 @@ interface Env { OLLAMA_API_BASE_URL: string; OPENAI_LIKE_API_KEY: string; OPENAI_LIKE_API_BASE_URL: string; + DEEPSEEK_API_KEY: string; }