How to stream your Raspberry Pi camera (using Picamera2) as MJPEG

A Raspberry Pi with an attached PiCamera can work as a simple surveillance system. A very simple way to stream the images is using MJPEG.

To quickly set up MJPEG streaming, I created a Python script that encodes the images in separate threads, so all of the Pi's CPU cores can be utilized. The maximum framerate is whatever the camera can deliver (about 47 frames per second with my camera module). On a Pi 4 this makes the CPU quite hot, so the script lets you throttle the throughput; 25 FPS is easily possible without any cooling.

Prerequisites

A Raspberry Pi with an attached camera module. I tested this on a Pi 2, 3 and 4; it probably works on a Pi 1 just as well, albeit with a severely reduced framerate.

Bring the OS up to date:

sudo apt update
sudo apt upgrade

Install the necessary Python modules:

sudo apt install python3-flask python3-libcamera python3-picamera2 python3-opencv

Then run the script:

python pystream.py

The stream will be available at port 9000 of your Raspberry Pi: http://your-pi-IP:9000/mjpg
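
Most browsers can display the MJPEG stream directly, but you can also read it programmatically. Below is a minimal client sketch that shows the stream with OpenCV; the address 192.168.1.50 is just a placeholder for your Pi's IP, and it assumes the OpenCV build on the client machine has FFmpeg support (the default for most packages):

#!/usr/bin/env python3
import cv2

# Minimal sketch of a client that reads the MJPEG stream and shows it in a window
cap = cv2.VideoCapture("http://192.168.1.50:9000/mjpg")   # replace with your Pi's IP
while True:
    ok, frame = cap.read()                  # grab the next JPEG frame from the stream
    if not ok:
        break
    cv2.imshow("PiCamera stream", frame)
    if cv2.waitKey(1) & 0xFF == ord("q"):   # press q to quit
        break
cap.release()
cv2.destroyAllWindows()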

If you run this in a terminal, the process will of course stop once the window is closed, unless you run it using screen. If you haven’t installed it yet:

sudo apt install screen

Start a screen session by just typing

screen

A short explanation will appear; dismiss it with <RETURN>. Anything you start in this session will continue to run, even if you close the terminal window. To detach from the session while leaving it running, press Ctrl-A followed by d.

To get back to a running screen session, just type

screen -r

The script (download here):

#!/usr/bin/env python3

from flask import Flask, Response
import cv2
from picamera2 import Picamera2
import libcamera
import time
import threading

# Creates an MJPEG stream from a Pi camera. It's a quick and dirty example of how to use your Pi as a surveillance camera.
# Uses 2 separate threads to encode the captured images to maximize throughput by using all 4 cores.
# The way this "double-buffering" is implemented causes it to only work correctly with one client.


# Set your desired image size here
# The timing for approximating the frames per second depends largely on this setting
# Larger images means more time needed for processing
CAM_X = 1280
CAM_Y = 720


# Change this, depending on the orientation of your camera module
CAM_HFLIP = False
CAM_VFLIP = False

# Change this to control the usage and therefore temperature of your Pi. On my Pi 4 a setting of 25 FPS
# results in CPU usage of roughly 40% and no temperature throttling (no additional cooling here)
# Set to 0 to impose no restrictions (on my Pi 4 this results in ~47 FPS (maximum of my PiCamera model 2), on my Pi 2 ~17 FPS)
MAX_FPS = 25

# Flask is our "webserver"
# The URL to the mjpg stream is http://my.server:WEB_PORT/mjpg
WEB_PORT = 9000
app = Flask(__name__)

# Keeps all data for the various threads in one place
class MgtData(object):
    stop_tasks = False            # If set, all threads should stop their processing
    frame1_has_new_data = False   # Is being set when frame1 receives a new buffer to encode
    lock1 = False                 # Is being set when frame1 receives a new buffer to encode, and cleared when encoding is done
    frame2_has_new_data = False   # Same for frame 2
    lock2 = False

    img_buffer1 = None            # Receives the image as a byte array for frame 1
    img_buffer2 = None            # ... for frame 2
    encoded_frame1 = None         # Stores the JPG-encoded image for frame 1
    encoded_frame2 = None         # ... for frame 2

    # If there is new data available on frame 1, return True
    @staticmethod
    def frame1_new_data():
        return (MgtData.frame1_has_new_data and not MgtData.lock1)

    # If there is new data available on frame 2, return True
    @staticmethod
    def frame2_new_data():
        return (MgtData.frame2_has_new_data and not MgtData.lock2)


# Deliver the individual frames to the client
@app.route('/mjpg')
def video_feed():
    response = Response(gen(), mimetype='multipart/x-mixed-replace; boundary=frame')
    response.headers.add("Access-Control-Allow-Origin", "*")
    return response

# Generate the individual frame data
def gen():
    while not MgtData.stop_tasks:  
        while (not (MgtData.frame1_new_data() or MgtData.frame2_new_data())):
            time.sleep (0.01) # Wait until we have data from one of the encode-threads

        frame = get_frame()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')

# If one of the frames has data already processed, deliver the respective encoded image
def get_frame():
    encoded_frame = None
    if (MgtData.frame1_new_data() or MgtData.frame2_new_data()):
        if (MgtData.frame1_new_data ()):
            encoded_frame = MgtData.encoded_frame1
            MgtData.frame1_has_new_data = False
        elif (MgtData.frame2_new_data ()):
            encoded_frame = MgtData.encoded_frame2
            MgtData.frame2_has_new_data = False
    else:
        print ("Duplicate frame")

    return encoded_frame




# Start the server
def start_webserver():
    try:
        app.run(host='0.0.0.0', port=WEB_PORT, threaded=True, debug=False)
    except Exception as e:
        print(e)

# Encoding thread for frame 1: JPEG-encode the buffered image, then release the lock
def encode1():
    MgtData.encoded_frame1 = cv2.imencode('.jpg', MgtData.img_buffer1)[1].tobytes()
    MgtData.frame1_has_new_data = True
    MgtData.lock1 = False   # Encoding is done, frame 1 may now be delivered

# Encoding thread for frame 2: same as above, for the second buffer
def encode2():
    MgtData.encoded_frame2 = cv2.imencode('.jpg', MgtData.img_buffer2)[1].tobytes()
    MgtData.frame2_has_new_data = True
    MgtData.lock2 = False   # Encoding is done, frame 2 may now be delivered



def run_camera():
    # init picamera
    picam2 = Picamera2()

    preview_config = picam2.preview_configuration
    preview_config.size = (CAM_X, CAM_Y)
    preview_config.format = 'RGB888'
    preview_config.transform = libcamera.Transform(hflip=CAM_HFLIP, vflip=CAM_VFLIP)
    preview_config.colour_space = libcamera.ColorSpace.Sycc()
    preview_config.buffer_count = 4 # Looks like 3 is the minimum on my system to get the full 47 FPS my camera is capable of
    preview_config.queue = True
    preview_config.controls = {'FrameRate': MAX_FPS if MAX_FPS else 100}  # MAX_FPS = 0 means no throttling: request up to 100 FPS

    try:
        picam2.start()

    except Exception as e:
        print(e)
        print("Is the camera connected correctly?\nYou can use \"libcamea-hello\" or \"rpicam-hello\" to test the camera.")
        exit(1)
    
    fps = 0
    start_time = 0
    framecount = 0
    try:
        start_time = time.time()
        while (not MgtData.stop_tasks):
            if (not (MgtData.frame1_new_data() and MgtData.frame2_new_data())):

                # get image data from camera
                my_img = picam2.capture_array()

                # calculate fps
                framecount += 1
                elapsed_time = float(time.time() - start_time)
                if (elapsed_time > 1):
                    fps = round(framecount/elapsed_time, 1)
                    framecount = 0
                    start_time = time.time()
                    print ("FPS: ", fps)

                # if one of the two frames is available to store new data, copy the captured image to the
                # respective buffer and start the encoding thread
                # At max we have 4 threads: our main thread, flask, encode1 and encode2
                if (not MgtData.frame1_new_data()):
                    MgtData.img_buffer1 = my_img
                    MgtData.frame1_has_new_data = True
                    MgtData.lock1 = True
                    encode_thread1 = threading.Thread(target=encode1, name="encode1")
                    encode_thread1.start()
                elif (not MgtData.frame2_new_data()):
                    MgtData.img_buffer2 = my_img
                    MgtData.frame2_has_new_data = True
                    MgtData.lock2 = True
                    encode_thread2 = threading.Thread(target=encode2, name="encode2")
                    encode_thread2.start()
            time.sleep (0.0005) # No need to constantly poll, cut the CPU some slack
            
    except KeyboardInterrupt as e:
        print(e)
        MgtData.stop_tasks = True
    finally:
        picam2.close()
        cv2.destroyAllWindows()



def streamon():
    camera_thread = threading.Thread(target= run_camera, name="camera_streamon")
    camera_thread.daemon = False
    camera_thread.start()

    if camera_thread is not None and camera_thread.is_alive():
        print('Starting web streaming ...')
        flask_thread = threading.Thread(name='flask_thread',target=start_webserver)
        flask_thread.daemon = True
        flask_thread.start()
    else:
        print('Error starting the stream')

    while not MgtData.stop_tasks:
        time.sleep (25) # Just waiting to end this thread



if __name__ == "__main__":
    try:
        streamon()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print(e)
    finally:
        print ("Closing...")
        MgtData.stop_tasks = True
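
If you want to watch the stream embedded in a web page served by the same script, you could add a small index route to the Flask app. This is just a sketch (the route and the page markup are placeholders, not part of the original script); place it next to the existing /mjpg route:

# Optional sketch: a minimal page at http://your-pi-IP:9000/ that embeds the stream
@app.route('/')
def index():
    return '<html><body><img src="/mjpg" alt="PiCamera stream"></body></html>'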