diff --git a/api/audiorecording.go b/api/audiorecording.go
index 6d12c48..6e05984 100644
--- a/api/audiorecording.go
+++ b/api/audiorecording.go
@@ -65,16 +65,18 @@ func (api *ManagementAPI) AudioRecordingStatus(w http.ResponseWriter, r *http.Re
 		return
 	}

-	var result int
-	err = tc2AgentDbus.Call("org.cacophony.TC2Agent.audiostatus", 0).Store(&result)
+	var status int
+	var mode int
+	err = tc2AgentDbus.Call("org.cacophony.TC2Agent.audiostatus", 0).Store(&mode, &status)
 	if err != nil {
 		log.Println(err)
 		http.Error(w, "Failed to request test audio recoding", http.StatusInternalServerError)
 		return
 	}
+	rp2040status := map[string]int{"mode": mode, "status": status}
 	w.WriteHeader(http.StatusOK)
-	json.NewEncoder(w).Encode(result)
+	json.NewEncoder(w).Encode(rp2040status)
 }

diff --git a/cmd/managementd/main.go b/cmd/managementd/main.go
index 426ce72..46ca6e3 100644
--- a/cmd/managementd/main.go
+++ b/cmd/managementd/main.go
@@ -55,13 +55,13 @@ const (
 )

 var (
-	haveClients  = make(chan bool)
-	version      = ""
-	sockets      = make(map[int64]*WebsocketRegistration)
-	socketsLock  sync.RWMutex
-	headerInfo   *headers.HeaderInfo
-	frameCh      = make(chan *FrameData, 4)
-	currentFrame = -1
+	haveClients = make(chan bool)
+	version     = ""
+	sockets     = make(map[int64]*WebsocketRegistration)
+	socketsLock sync.RWMutex
+	headerInfo  *headers.HeaderInfo
+	frameCh     = make(chan *FrameData, 4)
+	connected   atomic.Bool
 )

 // Set up and handle page requests.
@@ -233,7 +233,10 @@ func main() {
 			listener.Close()

 			err = handleConn(conn)
+			frameCh <- &FrameData{Disconnected: true}
 			log.Printf("camera connection ended with: %v", err)
+			connected.Store(false)
+
 		}
 	}()

@@ -262,7 +265,7 @@ func handleConn(conn net.Conn) error {
 	var frame *cptvframe.Frame = cptvframe.NewFrame(headerInfo)
 	var frames int = 0
 	var lastFrame *FrameData
-
+	connected.Store(true)
 	for {
 		_, err := io.ReadFull(reader, rawFrame)
 		if err != nil {
@@ -333,6 +336,10 @@ func WebsocketServer(ws *websocket.Conn) {
 		LastHeartbeatAt: time.Now(),
 		AtomicLock:      0,
 	}
+	if !connected.Load() {
+		_ = websocket.Message.Send(ws, "disconnected")
+	}
+
 	socketsLock.Unlock()
 	if firstSocket {
 		log.Print("Get new client register")
@@ -369,40 +376,56 @@ func sendFrameToSockets() {

 		lastFrame = <-frameCh
 		if len(sockets) != 0 {
-			// Make the frame info
-			buffer := bytes.NewBuffer(make([]byte, 0))
-			frameInfo := FrameInfo{
-				Camera:    map[string]interface{}{"ResX": headerInfo.ResX(), "ResY": headerInfo.ResY()},
-				Telemetry: lastFrame.Frame.Status,
-				Tracks:    lastFrame.Tracks,
-			}
-			frameInfoJson, _ := json.Marshal(frameInfo)
-			frameInfoLen := len(frameInfoJson)
-			// Write out the length of the frameInfo json as a u16
-			_ = binary.Write(buffer, binary.LittleEndian, uint16(frameInfoLen))
-			_ = binary.Write(buffer, binary.LittleEndian, frameInfoJson)
-			for _, row := range lastFrame.Frame.Pix {
-				_ = binary.Write(buffer, binary.LittleEndian, row)
-			}
-			// Send the buffer back to the client
-			frameBytes := buffer.Bytes()
-			socketsLock.RLock()
-			for uuid, socket := range sockets {
-				go func(socket *WebsocketRegistration, uuid int64, frameNum int) {
-					// If the socket is busy sending the previous frame,
-					// don't block, just move on to the next socket.
-					if atomic.CompareAndSwapUint32(&socket.AtomicLock, 0, 1) {
-						_ = websocket.Message.Send(socket.Socket, frameBytes)
-						atomic.StoreUint32(&socket.AtomicLock, 0)
-					} else {
-						// Locked, skip this frame to let client catch up.
-						log.Println("Skipping frame for", uuid, frameNum)
-					}
-				}(socket, uuid, frameNum)
+			if lastFrame.Disconnected {
+				socketsLock.RLock()
+				for uuid, socket := range sockets {
+					go func(socket *WebsocketRegistration, uuid int64, frameNum int) {
+						// If the socket is busy sending the previous frame,
+						// don't block, just move on to the next socket.
+						if atomic.CompareAndSwapUint32(&socket.AtomicLock, 0, 1) {
+							_ = websocket.Message.Send(socket.Socket, "disconnected")
+							atomic.StoreUint32(&socket.AtomicLock, 0)
+						} else {
+							time.Sleep(100 * time.Millisecond)
+						}
+					}(socket, uuid, frameNum)
+				}
+				socketsLock.RUnlock()
+			} else {
+				// Make the frame info
+				buffer := bytes.NewBuffer(make([]byte, 0))
+				frameInfo := FrameInfo{
+					Camera:    map[string]interface{}{"ResX": headerInfo.ResX(), "ResY": headerInfo.ResY()},
+					Telemetry: lastFrame.Frame.Status,
+					Tracks:    lastFrame.Tracks,
+				}
+				frameInfoJson, _ := json.Marshal(frameInfo)
+				frameInfoLen := len(frameInfoJson)
+				// Write out the length of the frameInfo json as a u16
+				_ = binary.Write(buffer, binary.LittleEndian, uint16(frameInfoLen))
+				_ = binary.Write(buffer, binary.LittleEndian, frameInfoJson)
+				for _, row := range lastFrame.Frame.Pix {
+					_ = binary.Write(buffer, binary.LittleEndian, row)
+				}
+				// Send the buffer back to the client
+				frameBytes := buffer.Bytes()
+				socketsLock.RLock()
+				for uuid, socket := range sockets {
+					go func(socket *WebsocketRegistration, uuid int64, frameNum int) {
+						// If the socket is busy sending the previous frame,
+						// don't block, just move on to the next socket.
+						if atomic.CompareAndSwapUint32(&socket.AtomicLock, 0, 1) {
+							_ = websocket.Message.Send(socket.Socket, frameBytes)
+							atomic.StoreUint32(&socket.AtomicLock, 0)
+						} else {
+							// Locked, skip this frame to let client catch up.
+							log.Println("Skipping frame for", uuid, frameNum)
+						}
+					}(socket, uuid, frameNum)
+				}
+				socketsLock.RUnlock()
+				frameNum = lastFrame.Frame.Status.FrameCount
 			}
-			socketsLock.RUnlock()
-			frameNum = lastFrame.Frame.Status.FrameCount
-
 			var socketsToRemove []int64
 			socketsLock.RLock()
 			for uuid, socket := range sockets {
@@ -432,6 +455,7 @@ func sendFrameToSockets() {
 }

 type FrameData struct {
-	Frame  *cptvframe.Frame
-	Tracks []map[string]interface{}
+	Disconnected bool
+	Frame        *cptvframe.Frame
+	Tracks       []map[string]interface{}
 }
diff --git a/static/js/about.js b/static/js/about.js
index 429627f..c2aa5e7 100644
--- a/static/js/about.js
+++ b/static/js/about.js
@@ -24,7 +24,8 @@ async function readAutoUpdate() {
   var res = await fetch("/api/auto-update", { headers: authHeaders });
   if (res.ok) {
     resJson = await res.json();
-    document.getElementById('auto-update-checkbox').checked = resJson.autoUpdate;
+    document.getElementById("auto-update-checkbox").checked =
+      resJson.autoUpdate;
   }
 }

@@ -122,15 +123,20 @@ async function updateSaltState() {
   var data = JSON.parse(await response.text());

   if (data.RunningUpdate) {
-    document.getElementById("salt-update-button").setAttribute("disabled", true);
-    document.getElementById("salt-update-button").textContent = "Running Salt Update...";
+    document
+      .getElementById("salt-update-button")
+      .setAttribute("disabled", true);
+    document.getElementById("salt-update-button").textContent =
+      "Running Salt Update...";
     setTimeout(updateSaltState, 2000);
   } else {
     enableSaltButton();
   }
-  document.getElementById("salt-update-progress").textContent = data.UpdateProgressPercentage;
-  document.getElementById("salt-update-progress-text").textContent = data.UpdateProgressStr;
+  document.getElementById("salt-update-progress").textContent =
+    data.UpdateProgressPercentage;
+  document.getElementById("salt-update-progress-text").textContent =
+    data.UpdateProgressStr;
   document.getElementById("running-salt-command").textContent =
     data.RunningUpdate ? "Yes" : "No";
   document.getElementById("running-salt-arguements").textContent =
@@ -156,7 +162,8 @@ async function updateSaltState() {

 function enableSaltButton() {
   document.getElementById("salt-update-button").removeAttribute("disabled");
-  document.getElementById("salt-update-button").textContent = "Run Salt Update...";
+  document.getElementById("salt-update-button").textContent =
+    "Run Salt Update...";
 }

 var runningSaltUpdate = true;
@@ -202,37 +209,40 @@ function pollSaltUpdateState() {
 }

 function getEnvironmentState() {
-  fetch('/api/salt-grains', {
-    headers: authHeaders
+  fetch("/api/salt-grains", {
+    headers: authHeaders,
   })
-  .then(response => response.json())
-  .then(data => {
+    .then((response) => response.json())
+    .then((data) => {
      if (data.environment) {
-        document.getElementById('environment-select').value = data.environment;
+        document.getElementById("environment-select").value = data.environment;
      }
-  })
-  .catch(error => console.error('Error fetching environment state:', error));
+    })
+    .catch((error) =>
+      console.error("Error fetching environment state:", error)
+    );
 }

 async function setEnvironment() {
   $("#set-environment-button").attr("disabled", true);
   $("#set-environment-button").html("Setting Environment");
-  const selectedEnvironment = document.getElementById('environment-select').value;
+  const selectedEnvironment =
+    document.getElementById("environment-select").value;
   headers = authHeaders;
-  headers.append('Content-Type', 'application/json');
+  headers.append("Content-Type", "application/json");
   try {
-    var response = await fetch('/api/salt-grains', {
-      method: 'POST',
-      headers: headers,
-      body: JSON.stringify({ environment: selectedEnvironment })
-    })
+    var response = await fetch("/api/salt-grains", {
+      method: "POST",
+      headers: headers,
+      body: JSON.stringify({ environment: selectedEnvironment }),
+    });
     if (response.ok) {
-      alert('Environment set successfully');
+      alert("Environment set successfully");
     } else {
-      alert('Failed to set environment');
+      alert("Failed to set environment");
     }
   } catch (error) {
-    console.error('Error setting environment:', error);
+    console.error("Error setting environment:", error);
   }
   $("#set-environment-button").attr("disabled", false);
   $("#set-environment-button").html("Set Environment");
diff --git a/static/js/audiorecording.ts b/static/js/audiorecording.ts
index 8cf2d92..3f2e2ea 100644
--- a/static/js/audiorecording.ts
+++ b/static/js/audiorecording.ts
@@ -5,12 +5,16 @@ let lastState: number | null = null;
 let countdown = 0;
 async function getAudioStatus() {
   var xmlHttp = new XMLHttpRequest();
+  xmlHttp.responseType = "json";
   xmlHttp.open("GET", "/api/audio/audio-status", true);
   xmlHttp.setRequestHeader("Authorization", "Basic " + btoa("admin:feathers"));
   var success = false;
   xmlHttp.onload = async function () {
     if (xmlHttp.status == 200) {
-      const state = Number(xmlHttp.response);
+      const rp2040state = xmlHttp.response;
+      const state = Number(rp2040state.status);
+      const mode = Number(rp2040state.mode);
+
       let statusText = "";
       if (state == 1) {
         countdown = 2;
@@ -52,7 +56,8 @@ async function getAudioStatus() {
       }
     } else if (state == 4) {
       countdown = 2;
-      statusText = "Already Taking a Recording";
+      let recType = mode == 1 ? "an audio" : "a thermal";
+      statusText = `Already Taking ${recType} Recording`;
       if (lastState != 4) {
         clearInterval(intervalId as number);
         document
diff --git a/static/js/camera.ts b/static/js/camera.ts
index adcd089..59085bb 100644
--- a/static/js/camera.ts
+++ b/static/js/camera.ts
@@ -1,5 +1,51 @@
 import { FrameInfo, Frame, Region, CameraInfo } from "../../api/types";

+let audioOnly = false;
+async function getAudioMode() {
+  var xmlHttp = new XMLHttpRequest();
+  xmlHttp.responseType = "json";
+  xmlHttp.open("GET", "/api/audiorecording", true);
+  xmlHttp.setRequestHeader("Authorization", "Basic " + btoa("admin:feathers"));
+  var success = false;
+  xmlHttp.onload = async function () {
+    if (xmlHttp.status == 200) {
+      audioOnly = xmlHttp.response["audio-mode"] == "AudioOnly";
+    }
+  };
+  xmlHttp.onerror = async function () {
+    console.log("Error getting audio status");
+  };
+  await xmlHttp.send();
+}
+
+async function getAudioStatus() {
+  var xmlHttp = new XMLHttpRequest();
+  xmlHttp.responseType = "json";
+  xmlHttp.open("GET", "/api/audio/audio-status", true);
+  xmlHttp.setRequestHeader("Authorization", "Basic " + btoa("admin:feathers"));
+  var success = false;
+  xmlHttp.onload = async function () {
+    if (xmlHttp.status == 200) {
+      const rp2040state = xmlHttp.response;
+      const state = Number(rp2040state.status);
+      const mode = Number(rp2040state.mode);
+      let statusText = "";
+      if (mode == 1) {
+        //in audio mode possibly could handle states, but should either be
+        //recording and about to record
+        document.getElementById("snapshot-stopped-message")!.innerText =
+          "Waiting for audio recording to finish";
+
+        document.getElementById("snapshot-stopped")!.style.display = "";
+        document.getElementById("snapshot-restart")!.style.display = "none";
+      }
+    }
+  };
+  xmlHttp.onerror = async function () {
+    console.log("Error getting audio status");
+  };
+  xmlHttp.send();
+}
 export const BlobReader = (function (): {
   arrayBuffer: (blob: Blob) => Promise<ArrayBuffer>;
 } {
@@ -292,6 +338,7 @@ export class CameraConnection {
     this.closing = false;
     this.connect();
   }
+  private thermalConnected: boolean = false;
   private state: CameraState = {
     socket: null,
     UUID: new Date().getTime(),
@@ -366,8 +413,35 @@ export class CameraConnection {
       });
       this.state.socket.addEventListener("message", async (event) => {
         if (event.data instanceof Blob) {
+          if (!this.thermalConnected) {
+            //make sure ui is in good state
+            document
+              .getElementById("take-snapshot-recording")!
+              .removeAttribute("disabled");
+
+            document.getElementById("snapshot-stopped")!.style.display = "none";
+            document.getElementById("snapshot-restart")!.style.display = "";
+          }
+          this.thermalConnected = true;
+
           this.onFrame((await this.parseFrame(event.data as Blob)) as Frame);
         } else {
+          if (event.data == "disconnected") {
+            await getAudioMode();
+            this.thermalConnected = false;
+            document
+              .getElementById("take-snapshot-recording")!
+              .setAttribute("disabled", "true");
+            if (audioOnly == true) {
+              document.getElementById("snapshot-stopped-message")!.innerText =
+                'In Audio only mode, change the mode in the "Audio Recording" section';
+
+              document.getElementById("snapshot-stopped")!.style.display = "";
+              document.getElementById("snapshot-restart")!.style.display = "none";
+            } else {
+              getAudioStatus();
+            }
+          }
           console.log("got message", event.data);
         }
         snapshotCount++;