fix/feat: improved graph and updated api to allow fetch from webpage

ptdewey committed Nov 24, 2024
1 parent abc54e7 commit 3c5e591

Showing 10 changed files with 123 additions and 112 deletions.
28 changes: 0 additions & 28 deletions graph-2d.html

This file was deleted.

35 changes: 0 additions & 35 deletions graph-3d.html

This file was deleted.

54 changes: 39 additions & 15 deletions index.html
@@ -6,7 +6,6 @@
</style>

<script src="//unpkg.com/3d-force-graph"></script>
<!--<script src="../../dist/3d-force-graph.js"></script>-->
</head>

<body>
@@ -16,20 +15,45 @@
<script type="module">
import SpriteText from "//unpkg.com/three-spritetext/dist/three-spritetext.mjs";

const Graph = ForceGraph3D()
(document.getElementById('3d-graph'))
.jsonUrl('./graph.json')
.nodeAutoColorBy('group')
.nodeThreeObject(node => {
const sprite = new SpriteText(node.id);
sprite.material.depthWrite = false; // make sprite background transparent
sprite.color = node.color;
sprite.textHeight = 8;
return sprite;
});

// Spread nodes a little wider
Graph.d3Force('charge').strength(-120);
const normalizeValue = (val, min, max) => (val - min) / (max - min);

const minWeight = 0.0;

// CHANGE: move handling of color to backend where calculations are faster
function mapValueToColor(value) {
const clampedValue = Math.max(minWeight, Math.min(value, 1));
// interpolate color between dark gray (#333333 -- 51) and white (#ffffff -- 255)
// const grayScale = Math.round(51 + (204 * clampedValue));
const grayScale = Math.min(255, Math.round((1 + 204 * clampedValue) * 1.5));
return `rgb(${grayScale}, ${grayScale}, ${grayScale})`;
}

fetch("http://localhost:11975/graph").then(res => res.json())
// fetch("./graph.json").then(res => res.json())
.then(data => {
const Graph = ForceGraph3D()
(document.getElementById('3d-graph'))
// .jsonUrl('./graph.json')
.graphData(data)
.nodeAutoColorBy('group')
.nodeThreeObject(node => {
const splitname = node.id.split("/"); // CHANGE: add new field on daemon side for split name
const sprite = new SpriteText(splitname[splitname.length - 1]);
sprite.material.depthWrite = false;
sprite.color = node.color;
sprite.textHeight = 8;
return sprite;
})
.linkOpacity(.8) // NOTE: baseline opacity can be adjusted, but keep high
.linkColor(link => {
const value = link.strength !== undefined ? link.strength : minWeight;
return mapValueToColor(value);
});

// Spread nodes a little wider
Graph.d3Force('charge').strength(-120);
})

// TODO: line weight can be set here, use documentWeight to do this
</script>
</body>
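
The CHANGE comment in mapValueToColor points at moving the color calculation to the backend, where it only has to run once per graph build. A minimal sketch of what that could look like on the daemon side, assuming a helper in the graph package (the name mapWeightToColor and its placement are assumptions, not part of this commit):

package graph

import "fmt"

// mapWeightToColor mirrors the frontend mapValueToColor: clamp the link weight
// to [0, 1], then interpolate a gray channel between #333333 (51) and #ffffff (255).
func mapWeightToColor(weight float64) string {
    if weight < 0 {
        weight = 0
    }
    if weight > 1 {
        weight = 1
    }
    gray := 51 + int(204*weight+0.5) // round to the nearest integer channel value
    return fmt.Sprintf("rgb(%d, %d, %d)", gray, gray, gray)
}

The .linkColor callback in index.html could then read a precomputed color field on each link instead of recomputing it on every render.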
21 changes: 19 additions & 2 deletions internal/api/graph_handlers.go
@@ -1,19 +1,32 @@
package api

import (
"encoding/json"
"log"
"net/http"
"slices"

"github.com/oolong-sh/oolong/internal/graph"
"github.com/oolong-sh/oolong/internal/keywords"
"github.com/oolong-sh/oolong/internal/notes"
"github.com/oolong-sh/oolong/internal/state"
)

var allowedOrigins = []string{
"http://localhost:8000",
}

func handleGetGraph(w http.ResponseWriter, r *http.Request) {
log.Println("Request received:", r.Method, r.URL, r.Host)
w.Header().Set("Content-Type", "application/json")
origin := r.Header.Get("Origin")

// check if the origin is whitelisted
if !slices.Contains(allowedOrigins, origin) {
log.Println("Requesting client not in allow list. Origin:", origin)
http.Error(w, "Request origin not in allow list", http.StatusForbidden)
return
}
w.Header().Set("Access-Control-Allow-Origin", origin)

// get snapshot of current state
s := state.State()
@@ -30,7 +43,11 @@ func handleGetGraph(w http.ResponseWriter, r *http.Request) {
}

// encode graph data in response
if err := json.NewEncoder(w).Encode(data); err != nil {
if _, err := w.Write(data); err != nil {
http.Error(w, "Error encoding graph data", 500)
return
}
// if err := json.NewEncoder(w).Encode(data); err != nil {
// http.Error(w, "Error encoding graph data", 500)
// }
}
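
A hedged sketch (not part of this commit) of how the new origin allow-list could be exercised with httptest; it only covers the rejection path, which returns before any state is read:

package api

import (
    "net/http"
    "net/http/httptest"
    "testing"
)

func TestHandleGetGraphRejectsUnknownOrigin(t *testing.T) {
    req := httptest.NewRequest(http.MethodGet, "/graph", nil)
    req.Header.Set("Origin", "http://evil.example") // not in allowedOrigins
    rec := httptest.NewRecorder()

    handleGetGraph(rec, req)

    if rec.Code != http.StatusForbidden {
        t.Fatalf("expected 403 for non-allow-listed origin, got %d", rec.Code)
    }
}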
6 changes: 4 additions & 2 deletions internal/api/note_handlers.go
@@ -58,13 +58,14 @@ func handleGetNote(w http.ResponseWriter, r *http.Request) {
// 'POST /note' endpoint handler creates a note file (and any missing directories) corresponding to input path
// Expected request body: { "path": "/path/to/note", "content": "full note contents to write" }
func handleCreateNote(w http.ResponseWriter, r *http.Request) {
log.Println("Request received:", r.Method, r.URL, r.Host, r.Body)
log.Println("Request received:", r.Method, r.URL, r.Host)

// parse request body
var req createUpdateRequest
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
http.Error(w, "Failed to decode request body", 400)
}
log.Println("Request body: ", req)

// check if path before file exists, then check if file exists
if e, err := exists(req.Path); err != nil {
@@ -95,13 +96,14 @@ func handleCreateNote(w http.ResponseWriter, r *http.Request) {
// It will create files that do not exist, but will not create directories
// Expected request body: { "path": "/path/to/note", "content": "full note contents to write" }
func handleUpdateNote(w http.ResponseWriter, r *http.Request) {
log.Println("Request received:", r.Method, r.URL, r.Host, r.Body)
log.Println("Request received:", r.Method, r.URL, r.Host)

// parse request body
var req createUpdateRequest
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
http.Error(w, "Failed to decode request body", 400)
}
log.Println("Request body: ", req)

// write contents to file
if err := os.WriteFile(req.Path, []byte(req.Content), 0666); err != nil {
27 changes: 19 additions & 8 deletions internal/daemon/watcher.go
@@ -4,6 +4,7 @@ import (
"errors"
"io/fs"
"log"
"os"
"path/filepath"
"slices"
"time"
@@ -50,19 +51,16 @@ func runNotesDirsWatcher(dirs ...string) error {
}

// watcher handler
// go func() { // running entire function as a goroutine, handler doesn't need to be one
for {
select {
case event, ok := <-watcher.Events:
if !ok {
log.Println("Watcher event channel returned bad result.")
return errors.New("Invalid watcher errors channel value.")
}
// log.Println("Event:", event)

// TODO: add new watcher in cases where new directories are created

if event.Has(fsnotify.Write) {
switch {
case event.Has(fsnotify.Write):
log.Println("Modified file:", event.Name)

// write event is sent on write start, wait 500ms for write to finish
@@ -72,6 +70,22 @@
documents.ReadDocuments(event.Name)

// TODO: add dedup timer to prevent multi-write calls

case event.Has(fsnotify.Remove):
log.Println("Removed file/directory", event.Name)
// TODO: remove from state
// - need to be careful with remove event as editors use it when writing files
// - state removal needs to also remove ngrams
// - should only trigger update on file deletions

case event.Has(fsnotify.Create):
log.Println("Created file/directory", event.Name)

if info, err := os.Stat(event.Name); err == nil {
if info.IsDir() {
watcher.Add(event.Name)
}
}
}
case err, ok := <-watcher.Errors:
if !ok {
@@ -80,7 +94,4 @@ func runNotesDirsWatcher(dirs ...string) error {
log.Println("error:", err)
}
}
// }()
// <-make(chan struct{})
// return nil
}
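
For the dedup-timer TODO in the Write case, one possible approach is a per-file debounce. A minimal sketch, assuming a small helper type in this package (the names are illustrative, not part of this commit):

package daemon

import (
    "sync"
    "time"
)

// debouncer coalesces bursts of events per key so the handler runs once per burst.
type debouncer struct {
    mu     sync.Mutex
    timers map[string]*time.Timer
}

func newDebouncer() *debouncer {
    return &debouncer{timers: make(map[string]*time.Timer)}
}

// Debounce schedules fn after delay; calling again with the same key before the
// delay elapses resets the timer instead of queuing another run.
func (d *debouncer) Debounce(key string, delay time.Duration, fn func()) {
    d.mu.Lock()
    defer d.mu.Unlock()
    if t, ok := d.timers[key]; ok {
        t.Stop()
    }
    d.timers[key] = time.AfterFunc(delay, fn)
}

The Write case could then call something like deb.Debounce(event.Name, 500*time.Millisecond, func() { documents.ReadDocuments(event.Name) }) in place of the fixed 500ms wait.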
12 changes: 8 additions & 4 deletions internal/graph/graph.go
@@ -15,8 +15,9 @@ type NodeJSON struct {
}

type LinkJSON struct {
Source string `json:"source"`
Target string `json:"target"`
Source string `json:"source"`
Target string `json:"target"`
Value float64 `json:"strength"`
}

type Graph struct {
@@ -34,9 +35,10 @@ func clamp(value, min, max float64) float64 {
return value
}

const NOTE_NODE_VAL = 10
const NOTE_NODE_VAL = 1

func SerializeGraph(keywordMap map[string]keywords.Keyword, notes []notes.Note, lowerBound, upperBound float64) ([]byte, error) {
// func SerializeGraph(keywordMap map[string]keywords.Keyword, notes []notes.Note, lowerBound, upperBound float64) (Graph, error) {
nodes := []NodeJSON{}
links := []LinkJSON{}

@@ -63,12 +65,13 @@
})

// Link notes to keywords
for keywordID := range note.Weights {
for keywordID, wgt := range note.Weights {
keyword, exists := keywordMap[keywordID]
if exists && keyword.Weight >= lowerBound {
links = append(links, LinkJSON{
Source: noteID,
Target: keyword.Keyword,
Value: wgt,
})
}
}
@@ -84,5 +87,6 @@ func SerializeGraph(keywordMap map[string]keywords.Keyword, notes []notes.Note,
return nil, err
}

// return graph, nil
return jsonData, nil
}
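
For reference, a standalone illustration (the struct is copied here only for the example) of how the new Value field serializes: the strength JSON tag is what index.html reads as link.strength.

package main

import (
    "encoding/json"
    "fmt"
)

type LinkJSON struct {
    Source string  `json:"source"`
    Target string  `json:"target"`
    Value  float64 `json:"strength"`
}

func main() {
    b, _ := json.Marshal(LinkJSON{Source: "notes/example.md", Target: "keyword", Value: 0.42})
    fmt.Println(string(b)) // {"source":"notes/example.md","target":"keyword","strength":0.42}
}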
13 changes: 8 additions & 5 deletions internal/linking/ngrams/weighting.go
@@ -46,12 +46,7 @@ var zoneB map[lexer.Zone]float64 = map[lexer.Zone]float64{
func CalcWeights(ngmap map[string]*NGram, N int) {
idf(ngmap, N)
// tfidf(ngmap)
// TODO: decide on k and b values (and allow them to be tweaked from config)
bm25(ngmap)
// CHANGE: probably take n and word length into account

// TODO: move adjustments to weights calculation function
// NOTE: these adjustments are much larger than the bm25 score and probably need to be scaled down

for _, ng := range ngmap {
ng.updateWeight()
@@ -95,3 +90,11 @@ func FilterMeaningfulNGrams(ngmap map[string]*NGram, minDF int, maxDF int, minAv
}
return out
}

type Doc interface {
// get
}

// TODO: normalize weights across documents
func NormalizeDocumentWeights() {
}
14 changes: 14 additions & 0 deletions internal/notes/notes.go
@@ -2,6 +2,7 @@ package notes

import (
"encoding/json"
"math"

"github.com/oolong-sh/oolong/internal/documents"
)
@@ -29,13 +30,20 @@ func DocumentsToNotes(documents map[string]*documents.Document) []Note {
threshold := 2.0

for k, v := range documents {
weightSum := 0.0
weights := map[string]float64{}

// set weight values
for k, v := range v.Weights {
if v > threshold {
weights[k] = v
weightSum += v * v
}
}

// normalize resulting weights
normalizeWeights(weights, math.Sqrt(weightSum))

notes = append(notes, Note{
Path: k,
Weights: weights,
@@ -44,3 +52,9 @@ func DocumentsToNotes(documents map[string]*documents.Document) []Note {

return notes
}

func normalizeWeights(m map[string]float64, sum float64) {
for k, v := range m {
m[k] = v / sum
}
}
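
A standalone illustration of the normalization added in DocumentsToNotes: weightSum accumulates squared weights, so dividing by its square root scales each note's weight vector to unit Euclidean length (values below are chosen only for the example).

package main

import (
    "fmt"
    "math"
)

func main() {
    // weights kept after thresholding
    weights := map[string]float64{"graph": 3.0, "oolong": 4.0}

    sumSquares := 0.0
    for _, v := range weights {
        sumSquares += v * v // 9 + 16 = 25
    }
    norm := math.Sqrt(sumSquares) // 5

    for k, v := range weights {
        weights[k] = v / norm
    }
    fmt.Println(weights) // map[graph:0.6 oolong:0.8]
}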