diff --git a/.env.example b/.env.example index 2f4957a2b5..a2f07f1480 100644 --- a/.env.example +++ b/.env.example @@ -53,6 +53,8 @@ TWITTER_EMAIL= # Account email TWITTER_2FA_SECRET= TWITTER_COOKIES= # Account cookies TWITTER_POLL_INTERVAL=120 # How often (in seconds) the bot should check for interactions +TWITTER_SEARCH_ENABLE=FALSE # Enable timeline search, WARNING this greatly increases your chance of getting banned +TWITTER_TARGET_USERS= # Comma separated list of Twitter user names to interact with X_SERVER_URL= XAI_API_KEY= XAI_MODEL= @@ -62,6 +64,10 @@ POST_INTERVAL_MIN= # Default: 90 POST_INTERVAL_MAX= # Default: 180 POST_IMMEDIATELY= +# Twitter action processing configuration +ACTION_INTERVAL=300000 # Interval in milliseconds between action processing runs (default: 5 minutes) +ENABLE_ACTION_PROCESSING=false # Set to true to enable the action processing loop + # Feature Flags IMAGE_GEN= # Set to TRUE to enable image generation USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI/1536, leave blank for local @@ -244,6 +250,13 @@ INTERNET_COMPUTER_ADDRESS= APTOS_PRIVATE_KEY= # Aptos private key APTOS_NETWORK= # must be one of mainnet, testnet +# EchoChambers Configuration +ECHOCHAMBERS_API_URL=http://127.0.0.1:3333 +ECHOCHAMBERS_API_KEY=testingkey0011 +ECHOCHAMBERS_USERNAME=eliza +ECHOCHAMBERS_DEFAULT_ROOM=general +ECHOCHAMBERS_POLL_INTERVAL=60 +ECHOCHAMBERS_MAX_MESSAGES=10 # AWS S3 Configuration Settings for File Upload AWS_ACCESS_KEY_ID= diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 535617fb44..58c6cdcc55 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -34,7 +34,7 @@ jobs: echo "NODE_ENV=test" >> packages/core/.env.test - name: Run tests - run: cd packages/core && pnpm test + run: cd packages/core && pnpm test:coverage - name: Build packages run: pnpm run build diff --git a/.github/workflows/integrationTests.yaml b/.github/workflows/integrationTests.yaml new file mode 100644 index 0000000000..cd9441507d --- /dev/null +++ b/.github/workflows/integrationTests.yaml @@ -0,0 +1,55 @@ +name: integration-test +on: + push: + branches: + - "*" + pull_request: + branches: + - "*" +jobs: + smoke-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: pnpm/action-setup@v3 + with: + version: 9.4.0 + + - uses: actions/setup-node@v4 + with: + node-version: "23" + cache: "pnpm" + + - name: Run smoke tests + run: pnpm run smokeTests + integration-tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: pnpm/action-setup@v3 + with: + version: 9.4.0 + + - uses: actions/setup-node@v4 + with: + node-version: "23" + cache: "pnpm" + + - name: Install dependencies + run: pnpm install -r + + - name: Build packages + run: pnpm build + + - name: Run integration tests + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + run: | + if [ -z "$OPENAI_API_KEY" ]; then + echo "Skipping integration tests due to missing required API keys" + exit 1 + else + pnpm run integrationTests + fi diff --git a/.gitignore b/.gitignore index b3d84f00fb..17300ccdb7 100644 --- a/.gitignore +++ b/.gitignore @@ -47,3 +47,5 @@ packages/plugin-coinbase/package-lock.json tsup.config.bundled_*.mjs .turbo + +coverage \ No newline at end of file diff --git a/README.md b/README.md index b87419cfad..dad5e3ebd1 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ git clone https://github.com/ai16z/eliza-starter.git cp .env.example .env -pnpm i && pnpm start +pnpm i && pnpm build && pnpm start ``` Then read the 
[Documentation](https://ai16z.github.io/eliza/) to learn how to customize your Eliza. @@ -90,7 +90,7 @@ sh scripts/start.sh ### Edit the character file -1. Open `agent/src/character.ts` to modify the default character. Uncomment and edit. +1. Open `packages/core/src/defaultCharacter.ts` to modify the default character. Uncomment and edit. 2. To load custom characters: - Use `pnpm start --characters="path/to/your/character.json"` diff --git a/agent/src/index.ts b/agent/src/index.ts index 8aab1b5d38..bb631a8136 100644 --- a/agent/src/index.ts +++ b/agent/src/index.ts @@ -60,6 +60,12 @@ export const wait = (minTime: number = 1000, maxTime: number = 3000) => { return new Promise((resolve) => setTimeout(resolve, waitTime)); }; +const logFetch = async (url: string, options: any) => { + elizaLogger.info(`Fetching ${url}`); + elizaLogger.info(options); + return fetch(url, options); +}; + export function parseArguments(): { character?: string; characters?: string; @@ -335,6 +341,7 @@ export async function initializeClients( } if (clientTypes.includes("twitter")) { + TwitterClientInterface.enableSearch = !isFalsish(getSecret(character, "TWITTER_SEARCH_ENABLE")); const twitterClients = await TwitterClientInterface.start(runtime); clients.push(twitterClients); } @@ -358,6 +365,22 @@ export async function initializeClients( return clients; } +function isFalsish(input: any): boolean { + // If the input is exactly NaN, return true + if (Number.isNaN(input)) { + return true; + } + + // Convert input to a string if it's not null or undefined + const value = input == null ? '' : String(input); + + // List of common falsish string representations + const falsishValues = ['false', '0', 'no', 'n', 'off', 'null', 'undefined', '']; + + // Check if the value (trimmed and lowercased) is in the falsish list + return falsishValues.includes(value.trim().toLowerCase()); +} + function getSecret(character: Character, secret: string) { return character.settings.secrets?.[secret] || process.env[secret]; } @@ -456,6 +479,7 @@ export async function createAgent( services: [], managers: [], cacheManager: cache, + fetch: logFetch, }); } diff --git a/docs/community/Streams/12-2024/2024-12-10.md b/docs/community/Streams/12-2024/2024-12-10.md new file mode 100644 index 0000000000..51afc2133f --- /dev/null +++ b/docs/community/Streams/12-2024/2024-12-10.md @@ -0,0 +1,94 @@ +--- +sidebar_position: 4 +title: "AI Agent Dev School Part 4" +description: "AI Pizza: Hacking Eliza for Domino's Delivery (plus TEE Deep Dive)" +--- + +# AI Agent Dev School Part 4 + +**AI Pizza: Hacking Eliza for Domino's Delivery (plus TEE Deep Dive)** + +Date: 2024-12-10 +YouTube Link: https://www.youtube.com/watch?v=6I9e9pJprDI + +## Timestamps + +Part 1: Trusted Execution Environments (TEEs) with Agent Joshua +- **00:00:09** - Stream starts, initial setup issues. +- **00:01:58** - Intro to Trusted Execution Environments (TEEs). +- **00:08:03** - Agent Joshua begins explaining TEEs and the Eliza plugin. +- **00:19:15** - Deeper dive into remote attestation. +- **00:24:50** - Discussion of derived keys. +- **00:37:00** - Deploying to a real TEE, Phala Network's TEE cloud. +- **00:50:48** - Q&A with Joshua, contact info, and next steps. + +Part 2: Building a Domino's pizza ordering agent +- **01:04:37** - Transition to building a Domino's pizza ordering agent. +- **01:14:20** - Discussion of the pizza ordering agent’s order flow and state machine. +- **01:22:07** - Using Claude to generate a state machine diagram. 
+- **01:32:17** - Creating the Domino's plugin in Eliza. +- **01:54:15** - Working on the pizza order provider. +- **02:16:46** - Pizza provider code completed. +- **02:28:50** - Discussion of caching customer and order data. +- **03:13:45** - Pushing fixes to main branch and continuing work on the agent. +- **04:24:30** - Discussion of summarizing past agent dev school sessions. +- **05:01:18** - Shaw returns, admits to ordering Domino's manually. +- **05:09:00** - Discussing payment flow and a confirm order action. +- **05:27:17** - Final code push, wrap-up, and end of stream. + + +## Summary + +This is a livestream titled "AI Agent Dev School Part 4" from the ai16z project, featuring Shaw. The stream is divided into two main parts: a technical discussion on Trusted Execution Environments (TEEs) and a coding session where Shaw attempts to build a pizza-ordering agent using the Domino's API. + +**Part 1: Trusted Execution Environments (TEEs) with Agent Joshua** + +This segment begins with Shaw introducing the concept of TEEs and their importance for running autonomous agents securely. He emphasizes the need to protect private keys and ensure that code execution is tamper-proof. Joshua from the Phala Network is brought on to explain TEEs in more detail and demonstrate how to use the TEE plugin he built for Eliza. + +* **Key Concepts:** + * **Trusted Execution Environments (TEEs):** Secure areas within a processor that isolate code and data, protecting them from unauthorized access and tampering. + * **Secure Enclave:** A cryptographic primitive that allows data to be encrypted and isolated within a processor. + * **Remote Attestation:** A method to verify that a program running inside a TEE is genuine and hasn't been tampered with, providing verifiability to users. + * **D-Stack:** An SDK developed in collaboration with Flashbots and Andrew Miller, enabling developers to build and launch Docker containers in TEEs. + * **Derived Key Provider:** A component that generates cryptographic keys based on a secret salt, ensuring that private keys are not exposed to humans. + +* **Demonstration:** + * Joshua walks through the process of setting up and deploying an Eliza agent in a TEE simulator, demonstrating how to generate remote attestations and derive keys. + * He shows how to use the remote attestation explorer to verify the authenticity of the agent running inside the TEE. + * He explains how to build a Docker image of the agent and deploy it to the Phala Network's TEE cloud solution. + +* **Use Cases:** + * Securely storing private keys for on-chain actions. + * Ensuring the integrity of autonomous agents, preventing tampering or unauthorized access. + * Providing verifiable execution for users and investors. + +* **Phala Network's TEE Cloud:** + * Joshua introduces Phala Network's TEE cloud solution, which allows developers to deploy Docker images and host their agents in a trusted execution environment. + * He mentions that the service supports various compute-intensive applications beyond AI agents. + * He invites interested developers to contact him on Discord (@hashwarlock) for onboarding and further details. + +**Part 2: Building a Pizza Ordering Agent** + +In the second part, Shaw transitions to a more lighthearted coding session where he attempts to build an agent that can order a pizza using the Domino's API. He highlights the challenges of handling payments securely and connecting user information to the conversation. + +* **Challenges:** + * Securely handling payment information. 
+ * Connecting user data to the current conversation. + * Defining the order flow using a state machine. + +* **Approach:** + * Shaw uses a state machine to model the pizza ordering process, defining different states and transitions based on user input and available information. + * He uses Claude (an AI assistant) to generate code snippets and assist with the development process. + * He decides to initially focus on a simplified version where the user's payment information is hardcoded in the environment variables, and the agent only needs to collect the user's address. + +## Hot Takes + +1. **"Maybe we'll mix it on LinkedIn so people can order Domino's on LinkedIn. There you go. Now we're cooking." (00:03:26)** - Shaw's seemingly flippant idea of ordering pizza on LinkedIn highlights the potential for integrating everyday services into unexpected platforms through agents. This sparked discussion about the wider implications for businesses and social media. + +2. **"Yeah, it'll probably get drained real quick. These fucking people." (00:28:30)** - Shaw accidentally leaked an API key on stream and expressed frustration with viewers who noticed, exposing the real-world risks of handling sensitive information during development, especially in a live environment. + +3. **"The secret to making a billion dollars is to use the existing agent framework to deliver apps to people on social media that they want." (01:09:35)** - Shaw’s strong assertion about focusing on building apps *using* existing frameworks rather than creating new ones is a bold statement about the current agent development landscape, suggesting that innovation lies in application development, not framework creation. + +4. **"So those are like, honest to God, if the bots are better than like 70% of tweets on Twitter, they're better than like 99.7 tweets and posts on LinkedIn." (01:39:57)** - This provocative comparison of content quality between Twitter and LinkedIn, suggesting bots surpass most LinkedIn posts, fueled lively debate in the chat and raised questions about the role and value of human-generated content in the age of AI. + +5. **"I subliminally messaged Domino's into my own brain, and now I have to eat it." (05:01:24)** - After hours of working on the pizza bot, Shaw abandoned the live coding attempt and ordered pizza manually, a humorous but relatable moment that highlighted the challenges and frustrations of software development, even when aided by AI. It also underscores the human desire for immediate gratification, even in the face of a potentially groundbreaking technological advancement. 
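+To make the order-flow discussion concrete, below is a minimal, hypothetical sketch of the kind of state machine described in Part 2. The state names (`NEW`, `AWAITING_ADDRESS`, `AWAITING_CONFIRM`, `PLACED`) and the `OrderContext` shape are invented for illustration and are not taken from the stream or the Eliza codebase; the only assumption carried over from the summary is that payment details live in environment variables, so the agent only needs to collect an address and a confirmation before ordering.

```typescript
// Hypothetical sketch only — illustrative state names, not code from the stream.

type OrderState =
    | "NEW"               // no order started yet
    | "AWAITING_ADDRESS"  // order started, delivery address still missing
    | "AWAITING_CONFIRM"  // address known, waiting for the user to confirm
    | "PLACED";           // order submitted to the pizza API

interface OrderContext {
    address?: string;
    confirmed?: boolean;
}

// Each transition looks at what information is already available and
// returns the next state, mirroring the flow described above.
function nextState(state: OrderState, ctx: OrderContext): OrderState {
    switch (state) {
        case "NEW":
            return "AWAITING_ADDRESS";
        case "AWAITING_ADDRESS":
            return ctx.address ? "AWAITING_CONFIRM" : "AWAITING_ADDRESS";
        case "AWAITING_CONFIRM":
            // Payment details are assumed to come from environment variables,
            // so user confirmation is the only remaining gate before ordering.
            return ctx.confirmed ? "PLACED" : "AWAITING_CONFIRM";
        case "PLACED":
            return "PLACED";
    }
}

// Example: an order that already has an address advances to confirmation.
// nextState("AWAITING_ADDRESS", { address: "123 Main St" }) === "AWAITING_CONFIRM"
```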
diff --git a/docs/community/components/Accordion.tsx b/docs/community/components/Accordion.tsx new file mode 100644 index 0000000000..9b4748089e --- /dev/null +++ b/docs/community/components/Accordion.tsx @@ -0,0 +1,183 @@ +import React, { useState, useRef, useEffect } from "react"; +import { GitHubItem } from "./Contributions"; +import { GITHUB_PAGE_LIMIT } from "./Contributors"; + +interface AccordionProps { + title: string; + isOpen: boolean; + onToggle: () => void; + data: GitHubItem[]; + loadMore?: () => void; + total_count: number; + primaryText?: string; + secondaryText?: string; + mainBackgroundColor?: string; +} + +export const Accordion: React.FC = ({ + title, + isOpen, + onToggle, + data, + loadMore, + total_count, + primaryText, + secondaryText, + mainBackgroundColor, +}) => { + const [hoveredIndex, setHoveredIndex] = useState(null); + const [hoverLoadMore, setHoverLoadMore] = useState(false); + const [maxHeight, setMaxHeight] = useState( + isOpen ? "1000px" : "0px", + ); + + const contentRef = useRef(null); + + React.useEffect(() => { + setMaxHeight(isOpen ? "1000px" : "0px"); + }, [isOpen]); + + useEffect(() => { + if (contentRef.current && data.length > GITHUB_PAGE_LIMIT) { + contentRef.current.scrollTo({ + top: contentRef.current.scrollHeight, + behavior: "smooth", + }); + } + }, [data]); + + return ( +
+
+
{title}
+
+ {"▶"} +
+
+
+
+ {data.map((entry, index) => ( +
+
setHoveredIndex(index)} + onMouseLeave={() => setHoveredIndex(null)} + onClick={() => + window.open( + entry.html_url, + "_blank", + "noopener,noreferrer", + ) + } + > +
+ {entry.bullet && ( +
+ )} +
{entry.title}
+
+
+ {entry.created_at.split("T")[0]} +
+
+
+ ))} +
+
+ {isOpen && loadMore && data.length < total_count && ( +
+ setHoverLoadMore(true)} + onMouseLeave={() => setHoverLoadMore(false)} + onClick={loadMore} + > + Load more + +
+ )} +
+ ); +}; diff --git a/docs/community/components/Contributions.tsx b/docs/community/components/Contributions.tsx new file mode 100644 index 0000000000..39fed8bbbe --- /dev/null +++ b/docs/community/components/Contributions.tsx @@ -0,0 +1,363 @@ +import React, { useState, useEffect } from "react"; +import { Accordion } from "./Accordion"; +import { StatCard } from "./StatCard"; +import { THEME_COLORS } from "./Contributors"; +import { hexToRgb, useGithubAccessToken } from "./utils"; +import ScoreIcon from "./ScoreIcon"; +import Summary from "./Summary"; +import Hero from "./Hero"; + +export interface GitHubItem { + html_url: string; + title: string; + created_at: string; + bullet?: string; +} + +export interface StatCardProps { + title: string; + value: number; + style?: React.CSSProperties; +} + +export interface AccordionItem { + data: GitHubItem[]; + total_count: number; + state?: string; +} + +export enum BULLET_COLOR { + OPEN = "#1A7F37", + CLOSE = "#D1242F", + MERGE = "#8250DF", +} + +const initializeAccordionItem = (): AccordionItem => ({ + data: [], + total_count: 0, +}); + +const Contributions = ({ + contributor, + onBack, + darkMode, + activitySummary, + score, +}) => { + const githubAccessToken = useGithubAccessToken(); + const [commitsData, setCommitsData] = useState( + initializeAccordionItem(), + ); + const [prsData, setPrsData] = useState( + initializeAccordionItem(), + ); + const [issuesData, setIssuesData] = useState( + initializeAccordionItem(), + ); + const [openAccordion, setOpenAccordion] = useState(null); + + const [commitPage, setCommitPage] = useState(1); + const [prPage, sePrPage] = useState(1); + const [issuePage, setIssuePage] = useState(1); + + useEffect(() => { + const fetchContributorStats = async () => { + try { + await fetchCommits(commitPage); + await fetchPRs(prPage); + await fetchIssues(issuePage); + } catch (error) { + console.error("Error fetching contributor stats:", error); + } + }; + + fetchContributorStats(); + }, [contributor.login]); + + const toggleAccordion = (section: string) => { + setOpenAccordion((prev) => (prev === section ? null : section)); + }; + + const fetchCommits = async (page: number) => { + try { + const commitResponse = await fetch( + `https://api.github.com/repos/ai16z/eliza/commits?author=${contributor.login}&page=${page}`, + { + method: "GET", + headers: { + Authorization: `token ${githubAccessToken}`, + Accept: "application/vnd.github.v3+json", + }, + }, + ); + const commitData = await commitResponse.json(); + const commitItems = commitData.map((commit: any) => ({ + html_url: commit.html_url, + title: commit.commit.message, + created_at: commit.commit.author.date, + })); + const currentCommitsData = [...commitsData.data, ...commitItems]; + setCommitsData({ + data: currentCommitsData, + total_count: contributor.contributions, + }); + } catch (error) { + console.error("Error fetching commits:", error); + } + }; + + const fetchPRs = async (page: number) => { + try { + const prResponse = await fetch( + `https://api.github.com/search/issues?q=type:pr+author:${contributor.login}+repo:ai16z/eliza&page=${page}`, + { + method: "GET", + headers: { + Authorization: `token ${githubAccessToken}`, + Accept: "application/vnd.github.v3+json", + }, + }, + ); + const prData = await prResponse.json(); + const prItems = prData.items.map((pr: any) => ({ + html_url: pr.html_url, + title: pr.title, + created_at: pr.created_at, + bullet: + pr.state === "open" + ? BULLET_COLOR.OPEN + : pr.pull_request.merged_at + ? 
BULLET_COLOR.MERGE + : BULLET_COLOR.CLOSE, + })); + const currentPrsData = [...prsData.data, ...prItems]; + + setPrsData({ + data: currentPrsData, + total_count: prData.total_count, + }); + } catch (error) { + console.error("Error fetching PRs:", error); + } + }; + + const fetchIssues = async (page: number) => { + try { + const issueResponse = await fetch( + `https://api.github.com/search/issues?q=type:issue+author:${contributor.login}+repo:ai16z/eliza&page=${page}`, + { + method: "GET", + headers: { + Authorization: `token ${githubAccessToken}`, + Accept: "application/vnd.github.v3+json", + }, + }, + ); + const issueData = await issueResponse.json(); + const issueItems = issueData.items.map((issue: any) => ({ + html_url: issue.html_url, + title: issue.title, + created_at: issue.created_at, + bullet: + issue.state === "open" + ? BULLET_COLOR.OPEN + : BULLET_COLOR.CLOSE, + })); + const currentIssuesData = [...issuesData.data, ...issueItems]; + setIssuesData({ + data: currentIssuesData, + total_count: issueData.total_count, + }); + } catch (error) { + console.error("Error fetching issues:", error); + } + }; + + const accordionItems = [ + { + title: "Commits", + data: commitsData, + section: "commits", + loadMore: () => { + const nextPage = commitPage + 1; + fetchCommits(nextPage); + setCommitPage(nextPage); + }, + }, + { + title: "Pull Requests", + data: prsData, + section: "pullRequests", + loadMore: () => { + const nextPage = prPage + 1; + fetchPRs(nextPage); + sePrPage(nextPage); + }, + }, + { + title: "Issues", + data: issuesData, + section: "issues", + loadMore: () => { + const nextPage = issuePage + 1; + fetchIssues(nextPage); + setIssuePage(nextPage); + }, + }, + ]; + + return ( +
+
+ + + back + +
+
+ + +
+ + + +
+ {accordionItems.map((stat, index) => ( + + ))} +
+
+ {accordionItems.map((item) => ( + toggleAccordion(item.section)} + data={item.data.data} + loadMore={item.loadMore} + total_count={item.data.total_count} + primaryText={ + darkMode + ? THEME_COLORS.dark.primaryText + : THEME_COLORS.light.primaryText + } + secondaryText={ + darkMode + ? THEME_COLORS.dark.secondaryText + : THEME_COLORS.light.secondaryText + } + mainBackgroundColor={ + darkMode + ? THEME_COLORS.dark.mainBackgroundColor + : THEME_COLORS.light.mainBackgroundColor + } + /> + ))} +
+
+ ); +}; + +export default Contributions; diff --git a/docs/community/components/Contributor.tsx b/docs/community/components/Contributor.tsx new file mode 100644 index 0000000000..45efcb551e --- /dev/null +++ b/docs/community/components/Contributor.tsx @@ -0,0 +1,96 @@ +import React, { useState } from "react"; +import { ContributorProps } from "./Contributors"; +import { THEME_COLORS } from "./Contributors"; +import { hexToRgb } from "./utils"; +import ScoreIcon from "./ScoreIcon"; +import Summary from "./Summary"; +import Hero from "./Hero"; + +const ContributorCard: React.FC = ({ + contributor, + onSelect, + darkMode, + activitySummary, + score, +}) => { + const [isHovered, setIsHovered] = useState(false); + + return ( +
setIsHovered(true)} + onMouseLeave={() => setIsHovered(false)} + onClick={onSelect} + > + + + +
+ ); +}; + +export default ContributorCard; diff --git a/docs/community/components/Contributors.tsx b/docs/community/components/Contributors.tsx new file mode 100644 index 0000000000..ecbd730f68 --- /dev/null +++ b/docs/community/components/Contributors.tsx @@ -0,0 +1,233 @@ +import React, { useEffect, useState, useRef } from "react"; +import ContributorCard from "./Contributor"; +import Contributions from "./Contributions"; +import { useColorMode } from "@docusaurus/theme-common"; +import contributorsSpec from "../contributors.json"; +import { useGithubAccessToken } from "./utils"; + +export interface Contributor { + id: number; + login: string; + avatar_url: string; + html_url: string; + contributions: number; +} + +export interface ContributorProps { + contributor: Contributor; + onSelect: () => void; + darkMode: boolean; + activitySummary?: string; + score?: number; +} + +export const THEME_COLORS = { + light: { + mainBackgroundColor: "#ffffff", + secondaryBackground: "rgba(0, 0, 0, 0.05)", + primaryText: "#000000", + secondaryText: "#ffa600", + }, + dark: { + mainBackgroundColor: "#1b1b1d", + secondaryBackground: "#242526", + primaryText: "#ffffff", + secondaryText: "#add8e6", + }, +}; + +export interface ActivityDetails { + score: number; + activitySummary: string; +} + +export const GITHUB_PAGE_LIMIT = 30; // The maximum number to fetch per page from the GitHub API. + +const Contributors: React.FC = () => { + const githubAccessToken = useGithubAccessToken(); + const { colorMode } = useColorMode(); + const [selectedContributor, setSelectedContributor] = + useState(null); + const [contributors, setContributors] = useState([]); + const [error, setError] = useState(null); + const [darkMode, setDarkMode] = useState(colorMode === "dark"); + const [hasMore, setHasMore] = useState(true); + const [activitySummaries, setActivitySummaries] = useState< + Map + >(new Map()); + + const observerRef = useRef(null); + const pageRef = useRef(1); + const loadingRef = useRef(true); + + useEffect(() => { + setDarkMode(colorMode === "dark"); + }, [colorMode]); + + const fetchContributors = async (page: number) => { + loadingRef.current = true; + try { + const response = await fetch( + `https://api.github.com/repos/ai16z/eliza/contributors?per_page=${GITHUB_PAGE_LIMIT}&page=${page}`, + { + method: "GET", + headers: { + Authorization: `token ${githubAccessToken}`, + Accept: "application/vnd.github.v3+json", + }, + }, + ); + if (!response.ok) { + throw new Error( + `Error fetching contributors: ${response.statusText}`, + ); + } + const data: Contributor[] = await response.json(); + if (data.length === 0) { + setHasMore(false); + return; + } + const currentContributors = [...contributors, ...data]; + + setContributors(currentContributors); + } catch (err) { + setError(err instanceof Error ? 
err.message : "Unknown error"); + } finally { + loadingRef.current = false; + } + }; + + useEffect(() => { + const fetchActivitySummaries = async () => { + try { + const response = await fetch( + "https://ai16z.github.io/data/contributors.json", + ); + if (!response.ok) { + throw new Error( + `Error fetching activity summaries: ${response.statusText}`, + ); + } + const specs = await response.json(); + + const currentActivitySummaries = new Map< + string, + ActivityDetails + >(); + specs.forEach( + (spec: { + contributor: string; + score: number; + summary: string; + }) => { + currentActivitySummaries.set(spec.contributor, { + score: spec.score, + activitySummary: spec.summary, + }); + }, + ); + setActivitySummaries(currentActivitySummaries); + } catch (err) { + console.log("Unknown error while fetching summaries"); + } + }; + + fetchActivitySummaries(); + fetchContributors(pageRef.current); + }, []); + + useEffect(() => { + const observer = new IntersectionObserver( + (entries) => { + if ( + entries[0].isIntersecting && + !loadingRef.current && + hasMore + ) { + loadingRef.current = true; + pageRef.current++; + fetchContributors(pageRef.current); + } + }, + { threshold: 1.0 }, + ); + + if (observerRef.current) { + observer.observe(observerRef.current); + } + + return () => { + if (observerRef.current) { + observer.unobserve(observerRef.current); + } + }; + }, [contributors, hasMore, selectedContributor]); + + if (error) { + return
Error: {error}
; + } + + if (!contributors.length) { + return
Loading...
; + } + + return ( +
+ {selectedContributor ? ( + setSelectedContributor(null)} + darkMode={darkMode} + activitySummary={ + activitySummaries.get(selectedContributor.login) + ?.activitySummary + } + score={ + activitySummaries.get(selectedContributor.login)?.score + } + /> + ) : ( + <> + {contributors.map((contributor) => ( + { + setSelectedContributor(contributor); + }} + darkMode={darkMode} + activitySummary={ + activitySummaries.get(contributor.login) + ?.activitySummary + } + score={ + activitySummaries.get(contributor.login)?.score + } + /> + ))} +
+ {hasMore &&
Loading more...
} + + )} +
+ ); +}; + +export default Contributors; diff --git a/docs/community/components/Hero.tsx b/docs/community/components/Hero.tsx new file mode 100644 index 0000000000..4575fd4080 --- /dev/null +++ b/docs/community/components/Hero.tsx @@ -0,0 +1,47 @@ +import React from "react"; + +export default function Hero({ + contributor, + secondaryText, + profilePictureSize, +}) { + return ( +
+ {`${contributor.login}'s +
+
+ {contributor.login} +
+ { +
+ {contributor.contributions} contributions +
+ } +
+
+ ); +} diff --git a/docs/community/components/ScoreIcon.tsx b/docs/community/components/ScoreIcon.tsx new file mode 100644 index 0000000000..fcf5ae325c --- /dev/null +++ b/docs/community/components/ScoreIcon.tsx @@ -0,0 +1,32 @@ +import React from "react"; + +export default function ScoreIcon({ style, iconColor, iconSize, score }) { + function Flash({ size, fill }) { + return ( + + + + ); + } + + return ( +
+ +
{typeof score === "number" ? score : "NULL"}
+
+ ); +} diff --git a/docs/community/components/StatCard.tsx b/docs/community/components/StatCard.tsx new file mode 100644 index 0000000000..d812de946f --- /dev/null +++ b/docs/community/components/StatCard.tsx @@ -0,0 +1,22 @@ +import React from "react"; +import { StatCardProps } from "./Contributions"; + +export const StatCard: React.FC = ({ title, value, style }) => { + return ( +
+
{title}
+
{value}
+
+ ); +}; diff --git a/docs/community/components/Summary.tsx b/docs/community/components/Summary.tsx new file mode 100644 index 0000000000..b9a0a90636 --- /dev/null +++ b/docs/community/components/Summary.tsx @@ -0,0 +1,13 @@ +import React from "react"; + +export default function Summary({ summary, style }) { + return ( +
+ {summary || "No summary available"} +
+ ); +} diff --git a/docs/community/components/utils.tsx b/docs/community/components/utils.tsx new file mode 100644 index 0000000000..bd660f5959 --- /dev/null +++ b/docs/community/components/utils.tsx @@ -0,0 +1,15 @@ +import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; + +export function hexToRgb(hex: string) { + hex = hex.replace("#", ""); + const bigint = parseInt(hex, 16); + const r = (bigint >> 16) & 255; + const g = (bigint >> 8) & 255; + const b = bigint & 255; + return `${r}, ${g}, ${b}`; +} + +export function useGithubAccessToken() { + const { siteConfig } = useDocusaurusContext(); + return siteConfig.customFields.GITHUB_ACCESS_TOKEN; +} diff --git a/docs/community/profiles.mdx b/docs/community/profiles.mdx index 28224acecd..5135aede38 100644 --- a/docs/community/profiles.mdx +++ b/docs/community/profiles.mdx @@ -1,17 +1,10 @@ --- -title: GitHub Contributors +title: GitHub Contributors description: GitHub contributors to our project --- -# GitHub Contributors +import Contributors from "./components/Contributors"; -This is a quick and dirty implementation of profiles that are programmatically generated from github data from `ai16z/eliza` repo. I'm looking for some help to integrate into Docusaurus as react components. See the code for generating profiles here: https://github.com/ai16z/ai16z.github.io +# GitHub Contributors -