Browse Source

Added takeover sign

TC 2 weeks ago
parent
commit
96ba90934a

+ 3 - 3
remdeskd/api.go

@@ -6,11 +6,11 @@ func registerAPIRoutes() {
 	// Start the web server
 	http.Handle("/", http.FileServer(webfs))
 	http.HandleFunc("/hid", usbKVM.HIDWebSocketHandler)
-	http.HandleFunc("/audio", videoCapture.AudioStreamingHandler)
-	http.HandleFunc("/stream", videoCapture.ServeVideoStream)
-
+	http.HandleFunc("/audio", usbCaptureDevice.AudioStreamingHandler)
+	http.HandleFunc("/stream", usbCaptureDevice.ServeVideoStream)
 }
 
+// Aux APIs for USB KVM mode
 func registerLocalAuxRoutes() {
 	http.HandleFunc("/aux/switchusbkvm", auxMCU.HandleSwitchUSBToKVM)
 	http.HandleFunc("/aux/switchusbremote", auxMCU.HandleSwitchUSBToRemote)

+ 78 - 45
remdeskd/mod/usbcapture/audio_device.go

@@ -9,6 +9,8 @@ import (
 	"os/exec"
 	"regexp"
 	"strings"
+	"syscall"
+	"time"
 
 	"github.com/gorilla/websocket"
 )
@@ -97,53 +99,23 @@ func GetDefaultAudioConfig() *AudioConfig {
 	}
 }
 
-// Downsample48kTo24kStereo downsamples a 48kHz stereo audio buffer to 24kHz.
-// It assumes the input buffer is in 16-bit stereo format (2 bytes per channel).
-// The output buffer will also be in 16-bit stereo format.
-func downsample48kTo24kStereo(buf []byte) []byte {
-	const frameSize = 4 // 2 bytes per channel × 2 channels
-	if len(buf)%frameSize != 0 {
-		// Trim incomplete frame (rare case)
-		buf = buf[:len(buf)-len(buf)%frameSize]
-	}
-
-	out := make([]byte, 0, len(buf)/2)
-
-	for i := 0; i < len(buf); i += frameSize * 2 {
-		// Copy every other frame (drop 1 in 2)
-		if i+frameSize <= len(buf) {
-			out = append(out, buf[i:i+frameSize]...)
-		}
-	}
-
-	return out
-}
-
-// Downsample48kTo16kStereo downsamples a 48kHz stereo audio buffer to 16kHz.
-// It assumes the input buffer is in 16-bit stereo format (2 bytes per channel).
-// The output buffer will also be in 16-bit stereo format.
-func downsample48kTo16kStereo(buf []byte) []byte {
-	const frameSize = 4 // 2 bytes per channel × 2 channels
-	if len(buf)%frameSize != 0 {
-		// Trim incomplete frame (rare case)
-		buf = buf[:len(buf)-len(buf)%frameSize]
+func GetDefaultAudioDevice() string {
+	//Check if the default ALSA device exists
+	if _, err := os.Stat("/dev/snd/pcmC0D0c"); err == nil {
+		return "/dev/snd/pcmC0D0c"
 	}
 
-	out := make([]byte, 0, len(buf)/3)
-
-	for i := 0; i < len(buf); i += frameSize * 3 {
-		// Copy every third frame (drop 2 in 3)
-		if i+frameSize <= len(buf) {
-			out = append(out, buf[i:i+frameSize]...)
-		}
+	//If not, list all capture devices and return the first one
+	devs, err := ListCaptureDevices()
+	if err != nil || len(devs) == 0 {
+		return ""
 	}
 
-	return out
+	return devs[0]
 }
 
 // AudioStreamingHandler handles incoming WebSocket connections for audio streaming.
-func (i Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request) {
-
+func (i *Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request) {
 	// Check if the request contains ?quality=low
 	quality := r.URL.Query().Get("quality")
 	qualityKey := []string{"low", "standard", "high"}
@@ -162,10 +134,19 @@ func (i Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request)
 	}
 	defer conn.Close()
 
-	if i.audiostopchan != nil {
+	if alsa_device_occupied(i.Config.AudioDeviceName) {
 		//Another instance already running
 		log.Println("Audio pipe already running, stopping previous instance")
 		i.audiostopchan <- true
+		retryCounter := 0
+		for alsa_device_occupied(i.Config.AudioDeviceName) {
+			time.Sleep(500 * time.Millisecond) //Wait a bit for the previous instance to stop
+			retryCounter++
+			if retryCounter > 5 {
+				log.Println("Failed to stop previous audio instance")
+				return
+			}
+		}
 	}
 
 	//Get the capture card audio input
@@ -189,7 +170,6 @@ func (i Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request)
 	log.Println("Using hardware device:", hwdev)
 
 	// Create a buffered reader to read audio data
-	i.audiostopchan = make(chan bool, 1)
 	log.Println("Starting audio pipe with arecord...")
 
 	// Start arecord with 48kHz, 16-bit, stereo
@@ -233,6 +213,7 @@ func (i Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request)
 	}()
 
 	log.Println("Starting audio capture loop...")
+	i.isAudioStreaming = true
 	for {
 		select {
 		case <-i.audiostopchan:
@@ -268,7 +249,6 @@ func (i Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request)
 				n = len(downsampled)                            // Update n to the new length
 			}
 
-			//log.Println("Read bytes:", n, "size of buffer:", len(buf))
 			//Send only the bytes read to WebSocket
 			err = conn.WriteMessage(websocket.BinaryMessage, downsampled[:n])
 			if err != nil {
@@ -279,8 +259,61 @@ func (i Instance) AudioStreamingHandler(w http.ResponseWriter, r *http.Request)
 	}
 
 DONE:
-
-	i.audiostopchan <- true // Signal to stop the audio pipe
+	i.isAudioStreaming = false
 	cmd.Process.Kill()
 	log.Println("Audio pipe finished")
 }
+
+// Downsample48kTo24kStereo downsamples a 48kHz stereo audio buffer to 24kHz.
+// It assumes the input buffer is in 16-bit stereo format (2 bytes per channel).
+// The output buffer will also be in 16-bit stereo format.
// downsample48kTo24kStereo converts a 48kHz 16-bit stereo PCM buffer to
// 24kHz by keeping every other frame (a frame is 4 bytes: 2 bytes per
// channel x 2 channels). Any trailing partial frame is discarded.
func downsample48kTo24kStereo(buf []byte) []byte {
	const bytesPerFrame = 4 // 16-bit samples, 2 channels

	// Ignore a trailing incomplete frame (rare case).
	usable := len(buf) - len(buf)%bytesPerFrame

	out := make([]byte, 0, usable/2)
	// Keep frame 0, skip frame 1, keep frame 2, ... (drop 1 in 2).
	for off := 0; off+bytesPerFrame <= usable; off += 2 * bytesPerFrame {
		out = append(out, buf[off:off+bytesPerFrame]...)
	}
	return out
}
+
+// Downsample48kTo16kStereo downsamples a 48kHz stereo audio buffer to 16kHz.
+// It assumes the input buffer is in 16-bit stereo format (2 bytes per channel).
+// The output buffer will also be in 16-bit stereo format.
// downsample48kTo16kStereo converts a 48kHz 16-bit stereo PCM buffer to
// 16kHz by keeping one frame out of every three (a frame is 4 bytes:
// 2 bytes per channel x 2 channels). Any trailing partial frame is
// discarded.
func downsample48kTo16kStereo(buf []byte) []byte {
	const bytesPerFrame = 4 // 16-bit samples, 2 channels

	// Ignore a trailing incomplete frame (rare case).
	usable := len(buf) - len(buf)%bytesPerFrame

	out := make([]byte, 0, usable/3)
	// Keep frame 0, skip frames 1-2, keep frame 3, ... (drop 2 in 3).
	for off := 0; off+bytesPerFrame <= usable; off += 3 * bytesPerFrame {
		out = append(out, buf[off:off+bytesPerFrame]...)
	}
	return out
}
+
// alsa_device_occupied reports whether the given ALSA capture device node
// cannot currently be opened. Opening a PCM capture node that is already in
// use by another process fails, so an open failure is treated as "occupied".
//
// NOTE(review): any open error (device missing, permission denied) is also
// reported as occupied; callers use this only as a best-effort busy probe,
// so that conservative answer is acceptable — confirm if reused elsewhere.
func alsa_device_occupied(dev string) bool {
	// Non-blocking open so the probe never hangs on a busy capture device.
	f, err := os.OpenFile(dev, os.O_RDONLY|syscall.O_NONBLOCK, 0)
	if err != nil {
		return true // occupied, missing, or not accessible
	}
	f.Close()
	return false
}

+ 6 - 3
remdeskd/mod/usbcapture/typedef.go

@@ -22,9 +22,8 @@ type AudioConfig struct {
 }
 
 type Config struct {
-	DeviceName      string       // The video device name, e.g., /dev/video0
+	VideoDeviceName string       // The video device name, e.g., /dev/video0
 	AudioDeviceName string       // The audio device name, e.g., /dev/snd
-	AudioEnabled    bool         // Exists support for audio capture
 	AudioConfig     *AudioConfig // The audio configuration
 }
 
@@ -45,6 +44,10 @@ type Instance struct {
 	streamInfo         string
 
 	/* audio capture device */
-	audiostopchan chan bool // Channel to stop audio capture
+	isAudioStreaming bool      // Whether audio is currently being captured
+	audiostopchan    chan bool // Channel to stop audio capture
 
+	/* Concurrent access */
+	accessCount       int       // The number of current access, in theory each instance should at most have 1 access
+	videoTakeoverChan chan bool // Channel to signal video takeover request
 }

+ 21 - 163
remdeskd/mod/usbcapture/usbcapture.go

@@ -1,19 +1,8 @@
 package usbcapture
 
 import (
-	"context"
-	"errors"
 	"fmt"
-	"log"
-	"mime/multipart"
-	"net/http"
-	"net/textproto"
 	"os"
-	"strings"
-	"syscall"
-
-	"github.com/vladimirvivien/go4vl/device"
-	"github.com/vladimirvivien/go4vl/v4l2"
 )
 
 // NewInstance creates a new video capture instance
@@ -23,158 +12,51 @@ func NewInstance(config *Config) (*Instance, error) {
 	}
 
 	//Check if the video device exists
-	if _, err := os.Stat(config.DeviceName); os.IsNotExist(err) {
-		return nil, fmt.Errorf("video device %s does not exist", config.DeviceName)
+	if _, err := os.Stat(config.VideoDeviceName); os.IsNotExist(err) {
+		return nil, fmt.Errorf("video device %s does not exist", config.VideoDeviceName)
 	} else if err != nil {
 		return nil, fmt.Errorf("failed to check video device: %w", err)
 	}
 
 	//Check if the device file actualy points to a video device
-	isValidDevice, err := checkVideoCaptureDevice(config.DeviceName)
+	isValidDevice, err := checkVideoCaptureDevice(config.VideoDeviceName)
 	if err != nil {
 		return nil, fmt.Errorf("failed to check video device: %w", err)
 	}
 
 	if !isValidDevice {
-		return nil, fmt.Errorf("device %s is not a video capture device", config.DeviceName)
+		return nil, fmt.Errorf("device %s is not a video capture device", config.VideoDeviceName)
 	}
 
 	//Get the supported resolutions of the video device
-	formatInfo, err := GetV4L2FormatInfo(config.DeviceName)
+	formatInfo, err := GetV4L2FormatInfo(config.VideoDeviceName)
 	if err != nil {
 		return nil, fmt.Errorf("failed to get video device format info: %w", err)
 	}
 
 	if len(formatInfo) == 0 {
-		return nil, fmt.Errorf("no supported formats found for device %s", config.DeviceName)
+		return nil, fmt.Errorf("no supported formats found for device %s", config.VideoDeviceName)
 	}
 
 	return &Instance{
 		Config:               config,
 		Capturing:            false,
 		SupportedResolutions: formatInfo,
-	}, nil
-}
-
-// start http service
-func (i *Instance) ServeVideoStream(w http.ResponseWriter, req *http.Request) {
-	mimeWriter := multipart.NewWriter(w)
-	w.Header().Set("Content-Type", fmt.Sprintf("multipart/x-mixed-replace; boundary=%s", mimeWriter.Boundary()))
-	partHeader := make(textproto.MIMEHeader)
-	partHeader.Add("Content-Type", "image/jpeg")
-
-	var frame []byte
-	for frame = range i.frames_buff {
-		if len(frame) == 0 {
-			log.Print("skipping empty frame")
-			continue
-		}
-
-		partWriter, err := mimeWriter.CreatePart(partHeader)
-		if err != nil {
-			log.Printf("failed to create multi-part writer: %s", err)
-			return
-		}
-
-		if _, err := partWriter.Write(frame); err != nil {
-			if errors.Is(err, syscall.EPIPE) {
-				//broken pipe, the client browser has exited
-				return
-			}
-			log.Printf("failed to write image: %s", err)
-		}
-
-	}
-}
-
-// start video capture
-func (i *Instance) StartVideoCapture(openWithResolution *CaptureResolution) error {
-	if i.Capturing {
-		return fmt.Errorf("video capture already started")
-	}
-
-	devName := i.Config.DeviceName
-	frameRate := 25
-	buffSize := 8
-	format := "mjpeg"
 
-	if openWithResolution == nil {
-		return fmt.Errorf("resolution not provided")
-	}
+		// Videos
+		camera:     nil,
+		pixfmt:     0,
+		width:      0,
+		height:     0,
+		streamInfo: "",
 
-	//Check if the video device is a capture device
-	isCaptureDev, err := checkVideoCaptureDevice(devName)
-	if err != nil {
-		return fmt.Errorf("failed to check video device: %w", err)
-	}
-	if !isCaptureDev {
-		return fmt.Errorf("device %s is not a video capture device", devName)
-	}
-
-	//Check if the selected FPS is valid in the provided Resolutions
-	resolutionIsSupported, err := deviceSupportResolution(i.Config.DeviceName, openWithResolution)
-	if err != nil {
-		return err
-	}
-
-	if !resolutionIsSupported {
-		return errors.New("this device do not support the required resolution settings")
-	}
+		//Audio
+		audiostopchan: make(chan bool, 1),
 
-	//Open the video device
-	camera, err := device.Open(devName,
-		device.WithIOType(v4l2.IOTypeMMAP),
-		device.WithPixFormat(v4l2.PixFormat{
-			PixelFormat: getFormatType(format),
-			Width:       uint32(openWithResolution.Width),
-			Height:      uint32(openWithResolution.Height),
-			Field:       v4l2.FieldAny,
-		}),
-		device.WithFPS(uint32(frameRate)),
-		device.WithBufferSize(uint32(buffSize)),
-	)
-
-	if err != nil {
-		return fmt.Errorf("failed to open video device: %w", err)
-	}
-
-	i.camera = camera
-
-	caps := camera.Capability()
-	log.Printf("device [%s] opened\n", devName)
-	log.Printf("device info: %s", caps.String())
-	//2025/03/16 15:45:25 device info: driver: uvcvideo; card: USB Video: USB Video; bus info: usb-0000:00:14.0-2
-
-	// set device format
-	currFmt, err := camera.GetPixFormat()
-	if err != nil {
-		log.Fatalf("unable to get format: %s", err)
-	}
-	log.Printf("Current format: %s", currFmt)
-	//2025/03/16 15:45:25 Current format: Motion-JPEG [1920x1080]; field=any; bytes per line=0; size image=0; colorspace=Default; YCbCr=Default; Quant=Default; XferFunc=Default
-	i.pixfmt = currFmt.PixelFormat
-	i.width = int(currFmt.Width)
-	i.height = int(currFmt.Height)
-
-	i.streamInfo = fmt.Sprintf("%s - %s [%dx%d] %d fps",
-		caps.Card,
-		v4l2.PixelFormats[currFmt.PixelFormat],
-		currFmt.Width, currFmt.Height, frameRate,
-	)
-
-	// start capture
-	ctx, cancel := context.WithCancel(context.TODO())
-	if err := camera.Start(ctx); err != nil {
-		log.Fatalf("stream capture: %s", err)
-	}
-	i.cameraStartContext = cancel
-
-	// video stream
-	i.frames_buff = camera.GetOutput()
-
-	log.Printf("device capture started (buffer size set %d)", camera.BufferCount())
-	i.Capturing = true
-	return nil
+		// Access control
+		videoTakeoverChan: make(chan bool, 1),
+		accessCount:       0,
+	}, nil
 }
 
 // GetStreamInfo returns the stream information string
@@ -187,15 +69,9 @@ func (i *Instance) IsCapturing() bool {
 	return i.Capturing
 }
 
-// StopCapture stops the video capture and closes the camera device
-func (i *Instance) StopCapture() error {
-	if i.camera != nil {
-		i.cameraStartContext()
-		i.camera.Close()
-		i.camera = nil
-	}
-	i.Capturing = false
-	return nil
+// IsAudioStreaming checks if the audio is currently being captured
+func (i *Instance) IsAudioStreaming() bool {
+	return i.isAudioStreaming
 }
 
 // Close closes the camera device and releases resources
@@ -205,21 +81,3 @@ func (i *Instance) Close() error {
 	}
 	return nil
 }
-
-func getFormatType(fmtStr string) v4l2.FourCCType {
-	switch strings.ToLower(fmtStr) {
-	case "jpeg":
-		return v4l2.PixelFmtJPEG
-	case "mpeg":
-		return v4l2.PixelFmtMPEG
-	case "mjpeg":
-		return v4l2.PixelFmtMJPEG
-	case "h264", "h.264":
-		return v4l2.PixelFmtH264
-	case "yuyv":
-		return v4l2.PixelFmtYUYV
-	case "rgb":
-		return v4l2.PixelFmtRGB24
-	}
-	return v4l2.PixelFmtMPEG
-}

+ 232 - 0
remdeskd/mod/usbcapture/video_device.go

@@ -3,11 +3,22 @@ package usbcapture
 import (
 	"bufio"
 	"bytes"
+	"context"
+	_ "embed"
+	"errors"
 	"fmt"
+	"log"
+	"mime/multipart"
+	"net/http"
+	"net/textproto"
 	"os/exec"
 	"regexp"
 	"strconv"
 	"strings"
+	"syscall"
+
+	"github.com/vladimirvivien/go4vl/device"
+	"github.com/vladimirvivien/go4vl/v4l2"
 )
 
 /*
@@ -36,6 +47,191 @@ type SizeInfo struct {
 	FPS    []int
 }
 
+//go:embed stream_takeover.jpg
+var endOfStreamJPG []byte
+
+// start video capture
+func (i *Instance) StartVideoCapture(openWithResolution *CaptureResolution) error {
+	if i.Capturing {
+		return fmt.Errorf("video capture already started")
+	}
+
+	if openWithResolution.FPS == 0 {
+		openWithResolution.FPS = 25 //Default to 25 FPS
+	}
+
+	devName := i.Config.VideoDeviceName
+	if openWithResolution == nil {
+		return fmt.Errorf("resolution not provided")
+	}
+	frameRate := openWithResolution.FPS
+	buffSize := 8 //No. of frames to buffer
+	//Default to MJPEG
+	//Other formats that are commonly supported are YUYV, H264, MJPEG
+	format := "mjpeg"
+
+	//Check if the video device is a capture device
+	isCaptureDev, err := checkVideoCaptureDevice(devName)
+	if err != nil {
+		return fmt.Errorf("failed to check video device: %w", err)
+	}
+	if !isCaptureDev {
+		return fmt.Errorf("device %s is not a video capture device", devName)
+	}
+
+	//Check if the selected FPS is valid in the provided Resolutions
+	resolutionIsSupported, err := deviceSupportResolution(i.Config.VideoDeviceName, openWithResolution)
+	if err != nil {
+		return err
+	}
+	if !resolutionIsSupported {
+		return errors.New("this device do not support the required resolution settings")
+	}
+
+	//Open the video device
+	camera, err := device.Open(devName,
+		device.WithIOType(v4l2.IOTypeMMAP),
+		device.WithPixFormat(v4l2.PixFormat{
+			PixelFormat: getFormatType(format),
+			Width:       uint32(openWithResolution.Width),
+			Height:      uint32(openWithResolution.Height),
+			Field:       v4l2.FieldAny,
+		}),
+		device.WithFPS(uint32(frameRate)),
+		device.WithBufferSize(uint32(buffSize)),
+	)
+
+	if err != nil {
+		return fmt.Errorf("failed to open video device: %w", err)
+	}
+
+	i.camera = camera
+	caps := camera.Capability()
+	log.Printf("device [%s] opened\n", devName)
+	log.Printf("device info: %s", caps.String())
+	// Should get something like this:
+	//2025/03/16 15:45:25 device info: driver: uvcvideo; card: USB Video: USB Video; bus info: usb-0000:00:14.0-2
+
+	// set device format
+	currFmt, err := camera.GetPixFormat()
+	if err != nil {
+		return fmt.Errorf("failed to get current pixel format: %w", err)
+	}
+	log.Printf("Current format: %s", currFmt)
+	//2025/03/16 15:45:25 Current format: Motion-JPEG [1920x1080]; field=any; bytes per line=0; size image=0; colorspace=Default; YCbCr=Default; Quant=Default; XferFunc=Default
+	i.pixfmt = currFmt.PixelFormat
+	i.width = int(currFmt.Width)
+	i.height = int(currFmt.Height)
+
+	i.streamInfo = fmt.Sprintf("%s - %s [%dx%d] %d fps",
+		caps.Card,
+		v4l2.PixelFormats[currFmt.PixelFormat],
+		currFmt.Width, currFmt.Height, frameRate,
+	)
+
+	// start capture
+	ctx, cancel := context.WithCancel(context.TODO())
+	if err := camera.Start(ctx); err != nil {
+		log.Fatalf("stream capture: %s", err)
+	}
+	i.cameraStartContext = cancel
+
+	// video stream
+	i.frames_buff = camera.GetOutput()
+
+	log.Printf("device capture started (buffer size set %d)", camera.BufferCount())
+	i.Capturing = true
+	return nil
+}
+
+// start http service
+func (i *Instance) ServeVideoStream(w http.ResponseWriter, req *http.Request) {
+	//Check if the access count is already 1, if so, kick out the previous access
+	if i.accessCount >= 1 {
+		log.Println("Another client is already connected, kicking out the previous client...")
+		if i.videoTakeoverChan != nil {
+			i.videoTakeoverChan <- true
+		}
+		log.Println("Previous client kicked out, taking over the stream...")
+	}
+	i.accessCount++
+	defer func() { i.accessCount-- }()
+
+	// Set up the multipart response
+	mimeWriter := multipart.NewWriter(w)
+	w.Header().Set("Content-Type", fmt.Sprintf("multipart/x-mixed-replace; boundary=%s", mimeWriter.Boundary()))
+	partHeader := make(textproto.MIMEHeader)
+	partHeader.Add("Content-Type", "image/jpeg")
+
+	var frame []byte
+
+	//Chrome MJPEG decoder cannot decode the first frame from MS2109 capture card for unknown reason
+	//Thus we are discarding the first frame here
+	if i.frames_buff != nil {
+		select {
+		case <-i.frames_buff:
+			// Discard the first frame
+		default:
+			// No frame to discard
+		}
+	}
+
+	// Streaming loop
+	for frame = range i.frames_buff {
+		if len(frame) == 0 {
+			log.Print("skipping empty frame")
+			continue
+		}
+
+		partWriter, err := mimeWriter.CreatePart(partHeader)
+		if err != nil {
+			log.Printf("failed to create multi-part writer: %s", err)
+			return
+		}
+
+		if _, err := partWriter.Write(frame); err != nil {
+			if errors.Is(err, syscall.EPIPE) {
+				//broken pipe, the client browser has exited
+				return
+			}
+			log.Printf("failed to write image: %s", err)
+		}
+
+		select {
+		case <-req.Context().Done():
+			// Client disconnected, exit the loop
+			return
+		case <-i.videoTakeoverChan:
+			// Another client is taking over, exit the loop
+
+			//Send the endofstream.jpg as last frame before exit
+			endFrameHeader := make(textproto.MIMEHeader)
+			endFrameHeader.Add("Content-Type", "image/jpeg")
+			endFrameHeader.Add("Content-Length", fmt.Sprint(len(endOfStreamJPG)))
+			partWriter, err := mimeWriter.CreatePart(endFrameHeader)
+			if err == nil {
+				partWriter.Write(endOfStreamJPG)
+			}
+			log.Println("Video stream taken over by another client, exiting...")
+			return
+		default:
+			// Continue streaming
+		}
+
+	}
+}
+
+// StopCapture stops the video capture and closes the camera device
+func (i *Instance) StopCapture() error {
+	if i.camera != nil {
+		i.cameraStartContext()
+		i.camera.Close()
+		i.camera = nil
+	}
+	i.Capturing = false
+	return nil
+}
+
 // CheckVideoCaptureDevice checks if the given video device is a video capture device
 func checkVideoCaptureDevice(device string) (bool, error) {
 	// Run v4l2-ctl to get device capabilities
@@ -53,6 +249,23 @@ func checkVideoCaptureDevice(device string) (bool, error) {
 	return false, nil
 }
 
+// GetDefaultVideoDevice returns the first available video capture device, e.g., /dev/video0
+func GetDefaultVideoDevice() (string, error) {
+	// List all /dev/video* devices and return the first one that is a video capture device
+	for i := 0; i < 10; i++ {
+		device := fmt.Sprintf("/dev/video%d", i)
+		isCapture, err := checkVideoCaptureDevice(device)
+		if err != nil {
+			continue
+		}
+		if isCapture {
+			return device, nil
+		}
+	}
+	return "", fmt.Errorf("no video capture device found")
+}
+
+// deviceSupportResolution checks if the given video device supports the specified resolution and frame rate
 func deviceSupportResolution(devicePath string, resolution *CaptureResolution) (bool, error) {
 	formatInfo, err := GetV4L2FormatInfo(devicePath)
 	if err != nil {
@@ -78,6 +291,7 @@ func deviceSupportResolution(devicePath string, resolution *CaptureResolution) (
 	return false, nil
 }
 
+// PrintV4L2FormatInfo prints the supported formats, resolutions, and frame rates of the given video device
 func PrintV4L2FormatInfo(devicePath string) {
 	// Check if the device is a video capture device
 	isCapture, err := checkVideoCaptureDevice(devicePath)
@@ -180,3 +394,21 @@ func GetV4L2FormatInfo(devicePath string) ([]FormatInfo, error) {
 
 	return formats, nil
 }
+
+func getFormatType(fmtStr string) v4l2.FourCCType {
+	switch strings.ToLower(fmtStr) {
+	case "jpeg":
+		return v4l2.PixelFmtJPEG
+	case "mpeg":
+		return v4l2.PixelFmtMPEG
+	case "mjpeg":
+		return v4l2.PixelFmtMJPEG
+	case "h264", "h.264":
+		return v4l2.PixelFmtH264
+	case "yuyv":
+		return v4l2.PixelFmtYUYV
+	case "rgb":
+		return v4l2.PixelFmtRGB24
+	}
+	return v4l2.PixelFmtMPEG
+}

+ 8 - 7
remdeskd/usbkvm.go

@@ -39,7 +39,7 @@ var (
 	/* Internal variables for USB-KVM mode only */
 	usbKVM              *remdeshid.Controller
 	auxMCU              *remdesaux.AuxMcu
-	videoCapture        *usbcapture.Instance
+	usbCaptureDevice    *usbcapture.Instance
 	defaultUsbKvmConfig = &UsbKvmConfig{
 		ListeningAddress:        ":9000",
 		USBKVMDevicePath:        "/dev/ttyUSB0",
@@ -121,9 +121,10 @@ func startUsbKvmMode(config *UsbKvmConfig) error {
 	}
 
 	// Initiate the video capture device
-	videoCapture, err = usbcapture.NewInstance(&usbcapture.Config{
-		DeviceName:  config.VideoCaptureDevicePath,
-		AudioConfig: usbcapture.GetDefaultAudioConfig(),
+	usbCaptureDevice, err = usbcapture.NewInstance(&usbcapture.Config{
+		VideoDeviceName: config.VideoCaptureDevicePath,
+		AudioDeviceName: config.AudioCaptureDevicePath,
+		AudioConfig:     usbcapture.GetDefaultAudioConfig(),
 	})
 
 	if err != nil {
@@ -135,7 +136,7 @@ func startUsbKvmMode(config *UsbKvmConfig) error {
 	usbcapture.PrintV4L2FormatInfo(config.VideoCaptureDevicePath)
 
 	//Start the video capture device
-	err = videoCapture.StartVideoCapture(&usbcapture.CaptureResolution{
+	err = usbCaptureDevice.StartVideoCapture(&usbcapture.CaptureResolution{
 		Width:  config.CaptureResolutionWidth,
 		Height: config.CaptureResolutionHeight,
 		FPS:    config.CaptureResolutionFPS,
@@ -155,8 +156,8 @@ func startUsbKvmMode(config *UsbKvmConfig) error {
 			auxMCU.Close()
 		}
 		log.Println("Shutting down capture device...")
-		if videoCapture != nil {
-			videoCapture.Close()
+		if usbCaptureDevice != nil {
+			usbCaptureDevice.Close()
 		}
 		os.Exit(0)
 	}()

+ 7 - 363
remdeskd/www/index.html

@@ -3,377 +3,21 @@
 <head>
     <meta charset="UTF-8">
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
-    <meta name="description" content="RemdesKVM Management Interface">
+    <meta name="description" content="dezuKVM Management Interface">
     <meta name="author" content="imuslab">
-    <title>Connected | RemdesKVM</title>
+    <title>Connected | dezuKVM</title>
     
     <!-- OpenGraph Metadata -->
-    <meta property="og:title" content="RemdesKVM Management Interface">
-    <meta property="og:description" content="A web-based management interface for RemdesKVM">
+    <meta property="og:title" content="dezuKVM Management Interface">
+    <meta property="og:description" content="A web-based management interface for dezuKVM">
     <meta property="og:type" content="website">
     <meta property="og:url" content="https://kvm.aroz.org">
     <meta property="og:image" content="https://kvm.aroz.org/og.jpg">
-    <script src="https://code.jquery.com/jquery-3.7.1.min.js" integrity="sha256-/JqT3SQfawRcv/BIHPThkBvs0OEvtFFmqPF/lYI/Cxo=" crossorigin="anonymous"></script>
-    <link rel="stylesheet" href="/main.css">
+    <script src="js/jquery-3.7.1.min.js"></script>
+    <link rel="stylesheet" href="main.css">
 </head>
 <body>
     <img id="remoteCapture" src="/stream" oncontextmenu="return false;"></img>
-    <script>
-        let socket;
-        let protocol = window.location.protocol === 'https:' ? 'wss' : 'ws';
-        let port = window.location.port ? window.location.port : (protocol === 'wss' ? 443 : 80);
-        let socketURL = `${protocol}://${window.location.hostname}:${port}/hid`;
-        let mouseMoveAbsolte = true; // Set to true for absolute mouse coordinates, false for relativeZ
-        let mouseIsOutside = false;
-        let audioFrontendStarted = false;
-
-        /* Mouse events */
-        function handleMouseMove(event) {
-            const hidCommand = {
-                event: 2,
-                mouse_x: event.clientX,
-                mouse_y: event.clientY,
-            };
-
-            const rect = event.target.getBoundingClientRect();
-            const relativeX = event.clientX - rect.left;
-            const relativeY = event.clientY - rect.top;
-            
-            if (relativeX < 0 || relativeY < 0 || relativeX > rect.width || relativeY > rect.height) {
-                mouseIsOutside = true;
-                return; // Mouse is outside the client rect
-            }
-            mouseIsOutside = false;
-            const percentageX = (relativeX / rect.width) * 4096;
-            const percentageY = (relativeY / rect.height) * 4096;
-
-            hidCommand.mouse_x = Math.round(percentageX);
-            hidCommand.mouse_y = Math.round(percentageY);
-
-            //console.log(`Mouse move: (${event.clientX}, ${event.clientY})`);
-            //console.log(`Mouse move relative: (${relativeX}, ${relativeY})`);
-            //console.log(`Mouse move percentage: (${hidCommand.mouse_x}, ${hidCommand.mouse_y})`);
-
-            if (socket && socket.readyState === WebSocket.OPEN) {
-                socket.send(JSON.stringify(hidCommand));
-            } else {
-                console.error("WebSocket is not open.");
-            }
-        }
-        
-
-        function handleMousePress(event) {
-            event.preventDefault();
-            event.stopImmediatePropagation();
-            if (mouseIsOutside) {
-                console.warn("Mouse is outside the capture area, ignoring mouse press.");
-                return;
-            }
-            const buttonMap = {
-                0: 1, 
-                1: 3,
-                2: 2
-            }; //Map javascript mouse buttons to HID buttons
-
-            const hidCommand = {
-                event: 3,
-                mouse_button: buttonMap[event.button] || 0
-            };
-
-            // Log the mouse button state
-            console.log(`Mouse down: ${hidCommand.mouse_button}`);
-
-            if (socket && socket.readyState === WebSocket.OPEN) {
-                socket.send(JSON.stringify(hidCommand));
-            } else {
-                console.error("WebSocket is not open.");
-            }
-
-            if (!audioFrontendStarted){
-                startAudioWebSocket();
-                audioFrontendStarted = true;
-            }
-        }
-
-        function handleMouseRelease(event) {
-            event.preventDefault();
-            event.stopImmediatePropagation();
-            if (mouseIsOutside) {
-                console.warn("Mouse is outside the capture area, ignoring mouse press.");
-                return;
-            }
-            const buttonMap = {
-                0: 1, 
-                1: 3,
-                2: 2
-            }; //Map javascript mouse buttons to HID buttons
-            
-            const hidCommand = {
-                event: 4,
-                mouse_button: buttonMap[event.button] || 0
-            };
-
-            console.log(`Mouse release: ${hidCommand.mouse_button}`);
-
-            if (socket && socket.readyState === WebSocket.OPEN) {
-                socket.send(JSON.stringify(hidCommand));
-            } else {
-                console.error("WebSocket is not open.");
-            }
-        }
-
-        function handleMouseScroll(event) {
-            const hidCommand = {
-                event: 5,
-                mouse_scroll: event.deltaY
-            };
-            if (mouseIsOutside) {
-                console.warn("Mouse is outside the capture area, ignoring mouse press.");
-                return;
-            }
-
-
-            console.log(`Mouse scroll: mouse_scroll=${event.deltaY}`);
-
-            if (socket && socket.readyState === WebSocket.OPEN) {
-                socket.send(JSON.stringify(hidCommand));
-            } else {
-                console.error("WebSocket is not open.");
-            }
-        }
-
-        // Attach mouse event listeners
-        let remoteCaptureEle = document.getElementById('remoteCapture');
-        remoteCaptureEle.addEventListener('mousemove', handleMouseMove);
-        remoteCaptureEle.addEventListener('mousedown', handleMousePress);
-        remoteCaptureEle.addEventListener('mouseup', handleMouseRelease);
-        remoteCaptureEle.addEventListener('wheel', handleMouseScroll);
-
-        /* Keyboard */
-        function isNumpadEvent(event) {
-            return event.location === 3;
-        }
-
-        function handleKeyDown(event) {
-            event.preventDefault();
-            event.stopImmediatePropagation();
-            const key = event.key;
-            let hidCommand = {
-                event: 0,
-                keycode: event.keyCode
-            };
-
-            console.log(`Key down: ${key} (code: ${event.keyCode})`);
-
-            // Check if the key is a modkey on the right side of the keyboard
-            const rightModKeys = ['Control', 'Alt', 'Shift', 'Meta'];
-            if (rightModKeys.includes(key) && event.location === 2) {
-                hidCommand.is_right_modifier_key = true;
-            }else if (key === 'Enter' && isNumpadEvent(event)) {
-                //Special case for Numpad Enter
-                hidCommand.is_right_modifier_key = true;
-            }else{
-                hidCommand.is_right_modifier_key = false;
-            }
-
-            if (socket && socket.readyState === WebSocket.OPEN) {
-                socket.send(JSON.stringify(hidCommand));
-            } else {
-                console.error("WebSocket is not open.");
-            }
-        }
-
-        function handleKeyUp(event) {
-            event.preventDefault();
-            event.stopImmediatePropagation();
-            const key = event.key;
-            
-            let hidCommand = {
-                event: 1,
-                keycode: event.keyCode
-            };
-
-            console.log(`Key up: ${key} (code: ${event.keyCode})`);
-
-            // Check if the key is a modkey on the right side of the keyboard
-            const rightModKeys = ['Control', 'Alt', 'Shift', 'Meta'];
-            if (rightModKeys.includes(key) && event.location === 2) {
-                hidCommand.is_right_modifier_key = true;
-            } else if (key === 'Enter' && isNumpadEvent(event)) {
-                //Special case for Numpad Enter
-                hidCommand.is_right_modifier_key = true;
-            }else{
-                hidCommand.is_right_modifier_key = false;
-            }
-
-
-            if (socket && socket.readyState === WebSocket.OPEN) {
-                socket.send(JSON.stringify(hidCommand));
-            } else {
-                console.error("WebSocket is not open.");
-            }
-        }
-
-        /* Start and Stop events */
-        function startWebSocket(){
-            if (socket){
-                //Already started
-                alert("Websocket already started");
-                return;
-            }
-            const socketUrl = socketURL;
-            socket = new WebSocket(socketUrl);
-
-            socket.addEventListener('open', function(event) {
-                console.log('WebSocket is connected.');
-            });
-
-            socket.addEventListener('message', function(event) {
-                //console.log('Message from server ', event.data);
-            });
-
-            document.addEventListener('keydown', handleKeyDown);
-            document.addEventListener('keyup', handleKeyUp);
-        }
-
-        function stopWebSocket(){
-            if (!socket){
-                alert("No ws connection to stop");
-                return;
-            }
-
-            socket.close();
-            console.log('WebSocket is disconnected.');
-            document.removeEventListener('keydown', handleKeyDown);
-            document.removeEventListener('keyup', handleKeyUp);
-        }
-
-        /* Audio Streaming Frontend */
-        let audioSocket;
-        let audioContext;
-        let audioQueue = [];
-        let audioPlaying = false;
-
-        //accept low, standard, high quality audio mode
-        function startAudioWebSocket(quality="standard") {
-            if (audioSocket) {
-                console.warn("Audio WebSocket already started");
-                return;
-            }
-            let protocol = window.location.protocol === 'https:' ? 'wss' : 'ws';
-            let port = window.location.port ? window.location.port : (protocol === 'wss' ? 443 : 80);
-            let audioSocketURL = `${protocol}://${window.location.hostname}:${port}/audio?quality=${quality}`;
-
-            audioSocket = new WebSocket(audioSocketURL);
-            audioSocket.binaryType = 'arraybuffer';
-
-            audioSocket.onopen = function() {
-                console.log("Audio WebSocket connected");
-                if (!audioContext) {
-                    audioContext = new (window.AudioContext || window.webkitAudioContext)({sampleRate: 24000});
-                }
-            };
-
-
-            const MAX_AUDIO_QUEUE = 8;
-            let PCM_SAMPLE_RATE;
-            if (quality == "high"){
-                PCM_SAMPLE_RATE = 48000; // Use 48kHz for high quality
-            } else if (quality == "low") {
-                PCM_SAMPLE_RATE = 16000; // Use 24kHz for low quality
-            } else {
-                PCM_SAMPLE_RATE = 24000; // Default to 24kHz for standard quality
-            }
-            let scheduledTime = 0;
-            audioSocket.onmessage = function(event) {
-                if (!audioContext) return;
-                let pcm = new Int16Array(event.data);
-                if (pcm.length === 0) {
-                    console.warn("Received empty PCM data");
-                    return;
-                }
-                if (pcm.length % 2 !== 0) {
-                    console.warn("Received PCM data with odd length, dropping last sample");
-                    pcm = pcm.slice(0, -1);
-                }
-                // Convert Int16 PCM to Float32 [-1, 1]
-                let floatBuf = new Float32Array(pcm.length);
-                for (let i = 0; i < pcm.length; i++) {
-                    floatBuf[i] = pcm[i] / 32768;
-                }
-                // Limit queue size to prevent memory overflow
-                if (audioQueue.length >= MAX_AUDIO_QUEUE) {
-                    audioQueue.shift();
-                }
-                audioQueue.push(floatBuf);
-                scheduleAudioPlayback();
-            };
-
-            audioSocket.onclose = function() {
-                console.log("Audio WebSocket closed");
-                audioSocket = null;
-                audioPlaying = false;
-                audioQueue = [];
-                scheduledTime = 0;
-            };
-
-            audioSocket.onerror = function(e) {
-                console.error("Audio WebSocket error", e);
-            };
-
-            function scheduleAudioPlayback() {
-                if (!audioContext || audioQueue.length === 0) return;
-
-                // Use audioContext.currentTime to schedule buffers back-to-back
-                if (scheduledTime < audioContext.currentTime) {
-                    scheduledTime = audioContext.currentTime;
-                }
-
-                while (audioQueue.length > 0) {
-                    let floatBuf = audioQueue.shift();
-                    let frameCount = floatBuf.length / 2;
-                    let buffer = audioContext.createBuffer(2, frameCount, PCM_SAMPLE_RATE);
-                    for (let ch = 0; ch < 2; ch++) {
-                        let channelData = buffer.getChannelData(ch);
-                        for (let i = 0; i < frameCount; i++) {
-                            channelData[i] = floatBuf[i * 2 + ch];
-                        }
-                    }
-                    let source = audioContext.createBufferSource();
-                    source.buffer = buffer;
-                    source.connect(audioContext.destination);
-                    source.start(scheduledTime);
-                    scheduledTime += buffer.duration;
-                }
-            }
-        }
-
-        function stopAudioWebSocket() {
-            if (!audioSocket) {
-                console.warn("No audio WebSocket to stop");
-                return;
-            }
-
-            if (audioSocket.readyState === WebSocket.OPEN) {
-                audioSocket.send("exit");
-            }
-            audioSocket.onclose = null; // Prevent onclose from being called again
-            audioSocket.onerror = null; // Prevent onerror from being called again
-            audioSocket.close();
-            audioSocket = null;
-            audioPlaying = false;
-            audioQueue = [];
-            if (audioContext) {
-                audioContext.close();
-                audioContext = null;
-            }
-        }
-
-        startWebSocket();
-
-        window.addEventListener('beforeunload', function() {
-            stopAudioWebSocket();
-        });
-    </script>
+    <script src="kvmevt.js"></script>
 </body>
 </html>

File diff suppressed because it is too large
+ 1 - 0
remdeskd/www/js/jquery-3.7.1.min.js


+ 379 - 0
remdeskd/www/kvmevt.js

@@ -0,0 +1,379 @@
+/*
+    kvmevt.js
+
+    Keyboard, Video, Mouse (KVM) over WebSocket client-side event handling.
+    Handles mouse and keyboard events, sending them to the server via WebSocket.
+    Also manages audio streaming from the server.
+*/
+const enableKvmEventDebugPrintout = false; //Set to true to enable debug printout
+const cursorCaptureElementId = "remoteCapture";
+let socket;
+let protocol = window.location.protocol === 'https:' ? 'wss' : 'ws';
+let port = window.location.port ? window.location.port : (protocol === 'wss' ? 443 : 80);
+let socketURL = `${protocol}://${window.location.hostname}:${port}/hid`;
+let mouseMoveAbsolute = true; // Set to true for absolute mouse coordinates, false for relative
+let mouseIsOutside = false; //Mouse is outside capture element
+let audioFrontendStarted = false; //Audio frontend has been started
+
+
+/* Mouse events */
+function handleMouseMove(event) {
+    const hidCommand = {
+        event: 2,
+        mouse_x: event.clientX,
+        mouse_y: event.clientY,
+    };
+
+    const rect = event.target.getBoundingClientRect();
+    const relativeX = event.clientX - rect.left;
+    const relativeY = event.clientY - rect.top;
+    
+    if (relativeX < 0 || relativeY < 0 || relativeX > rect.width || relativeY > rect.height) {
+        mouseIsOutside = true;
+        return; // Mouse is outside the client rect
+    }
+    mouseIsOutside = false;
+    const percentageX = (relativeX / rect.width) * 4096;
+    const percentageY = (relativeY / rect.height) * 4096;
+
+    hidCommand.mouse_x = Math.round(percentageX);
+    hidCommand.mouse_y = Math.round(percentageY);
+
+    if (enableKvmEventDebugPrintout) {
+        console.log(`Mouse move: (${event.clientX}, ${event.clientY})`);
+        console.log(`Mouse move relative: (${relativeX}, ${relativeY})`);
+        console.log(`Mouse move percentage: (${hidCommand.mouse_x}, ${hidCommand.mouse_y})`);
+    }
+
+    if (socket && socket.readyState === WebSocket.OPEN) {
+        socket.send(JSON.stringify(hidCommand));
+    } else {
+        console.error("WebSocket is not open.");
+    }
+}
+
+
+function handleMousePress(event) {
+    event.preventDefault();
+    event.stopImmediatePropagation();
+    if (mouseIsOutside) {
+        console.warn("Mouse is outside the capture area, ignoring mouse press.");
+        return;
+    }
+    /* Mouse buttons: 1=left, 2=right, 3=middle */
+    const buttonMap = {
+        0: 1, 
+        1: 3,
+        2: 2
+    }; //Map javascript mouse buttons to HID buttons
+
+    const hidCommand = {
+        event: 3,
+        mouse_button: buttonMap[event.button] || 0
+    };
+
+    // Log the mouse button state
+    if (enableKvmEventDebugPrintout) {
+        console.log(`Mouse down: ${hidCommand.mouse_button}`);
+    }
+
+    if (socket && socket.readyState === WebSocket.OPEN) {
+        socket.send(JSON.stringify(hidCommand));
+    } else {
+        console.error("WebSocket is not open.");
+    }
+
+    if (!audioFrontendStarted){
+        startAudioWebSocket();
+        audioFrontendStarted = true;
+    }
+}
+
+function handleMouseRelease(event) {
+    event.preventDefault();
+    event.stopImmediatePropagation();
+    if (mouseIsOutside) {
+        console.warn("Mouse is outside the capture area, ignoring mouse press.");
+        return;
+    }
+    /* Mouse buttons: 1=left, 2=right, 3=middle */
+    const buttonMap = {
+        0: 1, 
+        1: 3,
+        2: 2
+    }; //Map javascript mouse buttons to HID buttons
+    
+    const hidCommand = {
+        event: 4,
+        mouse_button: buttonMap[event.button] || 0
+    };
+
+    if (enableKvmEventDebugPrintout) {
+        console.log(`Mouse release: ${hidCommand.mouse_button}`);
+    }
+
+    if (socket && socket.readyState === WebSocket.OPEN) {
+        socket.send(JSON.stringify(hidCommand));
+    } else {
+        console.error("WebSocket is not open.");
+    }
+}
+
+function handleMouseScroll(event) {
+    const hidCommand = {
+        event: 5,
+        mouse_scroll: event.deltaY
+    };
+    if (mouseIsOutside) {
+        console.warn("Mouse is outside the capture area, ignoring mouse press.");
+        return;
+    }
+
+    if (enableKvmEventDebugPrintout) {
+        console.log(`Mouse scroll: mouse_scroll=${event.deltaY}`);
+    }
+
+    if (socket && socket.readyState === WebSocket.OPEN) {
+        socket.send(JSON.stringify(hidCommand));
+    } else {
+        console.error("WebSocket is not open.");
+    }
+}
+
+// Attach mouse event listeners
+let remoteCaptureEle = document.getElementById(cursorCaptureElementId);
+remoteCaptureEle.addEventListener('mousemove', handleMouseMove);
+remoteCaptureEle.addEventListener('mousedown', handleMousePress);
+remoteCaptureEle.addEventListener('mouseup', handleMouseRelease);
+remoteCaptureEle.addEventListener('wheel', handleMouseScroll);
+
+/* Keyboard */
+function isNumpadEvent(event) {
+    return event.location === 3;
+}
+
+function handleKeyDown(event) {
+    event.preventDefault();
+    event.stopImmediatePropagation();
+    const key = event.key;
+    let hidCommand = {
+        event: 0,
+        keycode: event.keyCode
+    };
+
+    if (enableKvmEventDebugPrintout) {
+        console.log(`Key down: ${key} (code: ${event.keyCode})`);
+    }
+
+    // Check if the key is a modkey on the right side of the keyboard
+    const rightModKeys = ['Control', 'Alt', 'Shift', 'Meta'];
+    if (rightModKeys.includes(key) && event.location === 2) {
+        hidCommand.is_right_modifier_key = true;
+    }else if (key === 'Enter' && isNumpadEvent(event)) {
+        //Special case for Numpad Enter
+        hidCommand.is_right_modifier_key = true;
+    }else{
+        hidCommand.is_right_modifier_key = false;
+    }
+
+    if (socket && socket.readyState === WebSocket.OPEN) {
+        socket.send(JSON.stringify(hidCommand));
+    } else {
+        console.error("WebSocket is not open.");
+    }
+}
+
+function handleKeyUp(event) {
+    event.preventDefault();
+    event.stopImmediatePropagation();
+    const key = event.key;
+    
+    let hidCommand = {
+        event: 1,
+        keycode: event.keyCode
+    };
+
+    if (enableKvmEventDebugPrintout) {
+        console.log(`Key up: ${key} (code: ${event.keyCode})`);
+    }
+
+    // Check if the key is a modkey on the right side of the keyboard
+    const rightModKeys = ['Control', 'Alt', 'Shift', 'Meta'];
+    if (rightModKeys.includes(key) && event.location === 2) {
+        hidCommand.is_right_modifier_key = true;
+    } else if (key === 'Enter' && isNumpadEvent(event)) {
+        //Special case for Numpad Enter
+        hidCommand.is_right_modifier_key = true;
+    }else{
+        hidCommand.is_right_modifier_key = false;
+    }
+
+
+    if (socket && socket.readyState === WebSocket.OPEN) {
+        socket.send(JSON.stringify(hidCommand));
+    } else {
+        console.error("WebSocket is not open.");
+    }
+}
+
+/* Start and Stop events */
+function startWebSocket(){
+    if (socket){
+        //Already started
+        console.warn("Invalid usage: HID Transport Websocket already started!");
+        return;
+    }
+    const socketUrl = socketURL;
+    socket = new WebSocket(socketUrl);
+
+    socket.addEventListener('open', function(event) {
+        console.log('HID Transport WebSocket is connected.');
+    });
+
+    socket.addEventListener('message', function(event) {
+        //Todo: handle control signals from server if needed
+        //console.log('Message from server ', event.data);
+    });
+
+    document.addEventListener('keydown', handleKeyDown);
+    document.addEventListener('keyup', handleKeyUp);
+}
+
+function stopWebSocket(){
+    if (!socket){
+        alert("No ws connection to stop");
+        return;
+    }
+
+    socket.close();
+    console.log('HID Transport WebSocket disconnected.');
+    document.removeEventListener('keydown', handleKeyDown);
+    document.removeEventListener('keyup', handleKeyUp);
+}
+
+/* Audio Streaming Frontend */
+let audioSocket;
+let audioContext;
+let audioQueue = [];
+let audioPlaying = false;
+
+//Accepts "low", "standard", or "high" audio quality mode
+function startAudioWebSocket(quality="standard") {
+    if (audioSocket) {
+        console.warn("Audio WebSocket already started");
+        return;
+    }
+    let protocol = window.location.protocol === 'https:' ? 'wss' : 'ws';
+    let port = window.location.port ? window.location.port : (protocol === 'wss' ? 443 : 80);
+    let audioSocketURL = `${protocol}://${window.location.hostname}:${port}/audio?quality=${quality}`;
+
+    audioSocket = new WebSocket(audioSocketURL);
+    audioSocket.binaryType = 'arraybuffer';
+
+    audioSocket.onopen = function() {
+        console.log("Audio WebSocket connected");
+        if (!audioContext) {
+            audioContext = new (window.AudioContext || window.webkitAudioContext)({sampleRate: 24000});
+        }
+    };
+
+
+    const MAX_AUDIO_QUEUE = 8;
+    let PCM_SAMPLE_RATE;
+    if (quality == "high"){
+        PCM_SAMPLE_RATE = 48000; // Use 48kHz for high quality
+    } else if (quality == "low") {
+        PCM_SAMPLE_RATE = 16000; // Use 16kHz for low quality
+    } else {
+        PCM_SAMPLE_RATE = 24000; // Default to 24kHz for standard quality
+    }
+    let scheduledTime = 0;
+    audioSocket.onmessage = function(event) {
+        if (!audioContext) return;
+        let pcm = new Int16Array(event.data);
+        if (pcm.length === 0) {
+            console.warn("Received empty PCM data");
+            return;
+        }
+        if (pcm.length % 2 !== 0) {
+            console.warn("Received PCM data with odd length, dropping last sample");
+            pcm = pcm.slice(0, -1);
+        }
+        // Convert Int16 PCM to Float32 [-1, 1]
+        let floatBuf = new Float32Array(pcm.length);
+        for (let i = 0; i < pcm.length; i++) {
+            floatBuf[i] = pcm[i] / 32768;
+        }
+        // Limit queue size to prevent memory overflow
+        if (audioQueue.length >= MAX_AUDIO_QUEUE) {
+            audioQueue.shift();
+        }
+        audioQueue.push(floatBuf);
+        scheduleAudioPlayback();
+    };
+
+    audioSocket.onclose = function() {
+        console.log("Audio WebSocket closed");
+        audioSocket = null;
+        audioPlaying = false;
+        audioQueue = [];
+        scheduledTime = 0;
+    };
+
+    audioSocket.onerror = function(e) {
+        console.error("Audio WebSocket error", e);
+    };
+
+    function scheduleAudioPlayback() {
+        if (!audioContext || audioQueue.length === 0) return;
+
+        // Use audioContext.currentTime to schedule buffers back-to-back
+        if (scheduledTime < audioContext.currentTime) {
+            scheduledTime = audioContext.currentTime;
+        }
+
+        while (audioQueue.length > 0) {
+            let floatBuf = audioQueue.shift();
+            let frameCount = floatBuf.length / 2;
+            let buffer = audioContext.createBuffer(2, frameCount, PCM_SAMPLE_RATE);
+            for (let ch = 0; ch < 2; ch++) {
+                let channelData = buffer.getChannelData(ch);
+                for (let i = 0; i < frameCount; i++) {
+                    channelData[i] = floatBuf[i * 2 + ch];
+                }
+            }
+            let source = audioContext.createBufferSource();
+            source.buffer = buffer;
+            source.connect(audioContext.destination);
+            source.start(scheduledTime);
+            scheduledTime += buffer.duration;
+        }
+    }
+}
+
+function stopAudioWebSocket() {
+    if (!audioSocket) {
+        console.warn("No audio WebSocket to stop");
+        return;
+    }
+
+    if (audioSocket.readyState === WebSocket.OPEN) {
+        audioSocket.send("exit");
+    }
+    audioSocket.onclose = null; // Prevent onclose from being called again
+    audioSocket.onerror = null; // Prevent onerror from being called again
+    audioSocket.close();
+    audioSocket = null;
+    audioPlaying = false;
+    audioQueue = [];
+    if (audioContext) {
+        audioContext.close();
+        audioContext = null;
+    }
+}
+
+startWebSocket();
+
+window.addEventListener('beforeunload', function() {
+    stopAudioWebSocket();
+});

Some files were not shown because too many files changed in this diff