#!/usr/bin/env python3
"""Stream DepthAI RGB camera frames as an MJPEG feed over HTTP (port 8080)."""
import json
import socketserver
import threading
import time
from http.server import BaseHTTPRequestHandler, HTTPServer
from io import BytesIO
from pathlib import Path
import sys
from socketserver import ThreadingMixIn
from time import sleep

import depthai as dai
import numpy as np
import cv2
from PIL import Image
import blobconverter

HTTP_SERVER_PORT = 8080


class TCPServerRequest(socketserver.BaseRequestHandler):
    """Push-style TCP handler: sends one hand-rolled HTTP header, then keeps
    streaming whatever string the server stores in ``server.datatosend``."""

    def handle(self):
        # Minimal HTTP response header written by hand; the connection is
        # then held open so data can be pushed to the client indefinitely.
        header = 'HTTP/1.0 200 OK\r\nServer: Mozarella/2.2\r\nAccept-Range: bytes\r\nConnection: close\r\nMax-Age: 0\r\nExpires: 0\r\nCache-Control: no-cache, private\r\nPragma: no-cache\r\nContent-Type: application/json\r\n\r\n'
        self.request.send(header.encode())
        try:
            while True:
                sleep(0.1)  # poll ~10x/s for fresh data
                if hasattr(self.server, 'datatosend'):
                    self.request.send(self.server.datatosend.encode() + "\r\n".encode())
        except (BrokenPipeError, ConnectionResetError):
            # Client disconnected: end this handler quietly instead of
            # dumping a traceback for every dropped connection.
            pass


# HTTPServer MJPEG
class VideoStreamHandler(BaseHTTPRequestHandler):
    """Serves an endless multipart/x-mixed-replace MJPEG stream built from
    the latest frame the main loop stores in ``server.frametosend``."""

    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type', 'multipart/x-mixed-replace; boundary=--jpgboundary')
        self.end_headers()
        try:
            while True:
                sleep(0.1)  # throttles to ~10 fps; also waits for the first frame
                if hasattr(self.server, 'frametosend'):
                    # frametosend is a BGR OpenCV frame; PIL wants RGB.
                    image = Image.fromarray(cv2.cvtColor(self.server.frametosend, cv2.COLOR_BGR2RGB))
                    stream_file = BytesIO()
                    # Encode ONCE and reuse the bytes for both the
                    # Content-length header and the body (the original code
                    # JPEG-encoded the identical frame twice per iteration).
                    image.save(stream_file, 'JPEG')
                    jpg_bytes = stream_file.getvalue()
                    # Proper multipart framing: CRLF around the boundary so
                    # the part headers do not run into the boundary text.
                    self.wfile.write(b"\r\n--jpgboundary\r\n")
                    self.send_header('Content-type', 'image/jpeg')
                    self.send_header('Content-length', str(len(jpg_bytes)))
                    self.end_headers()
                    self.wfile.write(jpg_bytes)
        except (BrokenPipeError, ConnectionResetError):
            pass  # viewer closed the stream


class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""
    pass


# start MJPEG HTTP Server (daemon thread so it dies with the main process)
server_HTTP = ThreadedHTTPServer(('0.0.0.0', HTTP_SERVER_PORT), VideoStreamHandler)
th = threading.Thread(target=server_HTTP.serve_forever)
th.daemon = True
th.start()

# Create pipeline
pipeline = dai.Pipeline()

# Define source and output
camRgb = pipeline.createColorCamera()
xoutVideo = pipeline.createXLinkOut()
xoutVideo.setStreamName("rgb")

# Properties
# Select the color sensor socket and capture format.
# NOTE(review): CameraBoardSocket.RGB is a legacy alias (newer depthai
# releases name this CAM_A) -- confirm against the installed depthai version.
camRgb.setBoardSocket(dai.CameraBoardSocket.RGB)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setVideoSize(1920, 1080)
# Lock focus so the image does not hunt while streaming; 120 is presumably a
# lens position index, not a distance -- verify against depthai docs.
camRgb.initialControl.setManualFocus(120)
# Non-blocking input with queue depth 1: keep only the newest frame and
# drop stale ones rather than stalling the camera.
xoutVideo.input.setBlocking(False)
xoutVideo.input.setQueueSize(1)

# Linking
camRgb.video.link(xoutVideo.input)

# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    # Host-side queue mirrors the XLinkOut settings: depth 1, non-blocking.
    previewQueue = device.getOutputQueue(name="rgb", maxSize=1, blocking=False)
    while True:
        inPreview = previewQueue.get()  # blocks until a frame arrives
        frame = inPreview.getCvFrame()
        # Publish the latest BGR frame for the MJPEG handler threads to serve.
        server_HTTP.frametosend = frame