Write OpenCV frames into a GStreamer RTSP server pipeline
I'm trying to push OpenCV images into a GStreamer RTSP server in Python. I'm having trouble writing into the media factory, and since I'm new to gst-rtsp-server and there's little documentation, I don't know whether I'm using the right approach. I use a thread to start the MainLoop, and in the main thread I create buffers to push into the appsrc element of the media factory's pipeline. Is this the right approach for what I want to achieve? Can anyone help me? My code is below:

from threading import Thread
from time import clock

import cv2
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject


class SensorFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self, **properties):
        super(SensorFactory, self).__init__(**properties)
        self.launch_string = 'appsrc ! video/x-raw,width=320,height=240,framerate=30/1 ' \
                             '! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency ' \
                             '! rtph264pay config-interval=1 name=pay0 pt=96'
        self.pipeline = Gst.parse_launch(self.launch_string)
        self.appsrc = self.pipeline.get_child_by_index(4)

    def do_create_element(self, url):
        return self.pipeline


class GstServer(GstRtspServer.RTSPServer):
    def __init__(self, **properties):
        super(GstServer, self).__init__(**properties)
        self.factory = SensorFactory()
        self.factory.set_shared(True)
        self.get_mount_points().add_factory("/test", self.factory)
        self.attach(None)


GObject.threads_init()
Gst.init(None)

server = GstServer()

loop = GObject.MainLoop()
th = Thread(target=loop.run)
th.start()

print('Thread started')

cap = cv2.VideoCapture(0)

print(cap.isOpened())

frame_number = 0

fps = 30
duration = 1 / fps

timestamp = clock()

while cap.isOpened():
    ret, frame = cap.read()
    if ret:

        print('Writing buffer')

        data = frame.tostring()

        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        buf.duration = fps
        timestamp = clock() - timestamp
        buf.pts = buf.dts = int(timestamp)
        buf.offset = frame_number
        frame_number += 1
        retval = server.factory.appsrc.emit('push-buffer', buf)
        print(retval)

        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

cap.release()

By the way, I tried to copy the buffer creation from the OpenCV source code, but I'm not sure I correctly translated the C++ code into Python.

Mascia asked 20/11, 2017 at 16:13

I found the solution; lots of stuff was missing.

  • I used the need-data signal, like the examples in gst-rtsp-server.
  • I changed some default parameters of appsrc, namely is-live, block and format.
  • I set the caps on the appsrc element.
  • I was not setting the offset of the buffer correctly.

Here's the code for anyone who's facing the same problem or has a somewhat similar one.

#!/usr/bin/env python3

import cv2
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject


class SensorFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self, **properties):
        super(SensorFactory, self).__init__(**properties)
        self.cap = cv2.VideoCapture(0)
        self.number_frames = 0
        self.fps = 30
        self.duration = 1 / self.fps * Gst.SECOND  # duration of a frame in nanoseconds
        self.launch_string = 'appsrc name=source is-live=true block=true format=GST_FORMAT_TIME ' \
                             'caps=video/x-raw,format=BGR,width=640,height=480,framerate={}/1 ' \
                             '! videoconvert ! video/x-raw,format=I420 ' \
                             '! x264enc speed-preset=ultrafast tune=zerolatency ' \
                             '! rtph264pay config-interval=1 name=pay0 pt=96'.format(self.fps)

    def on_need_data(self, src, length):
        if self.cap.isOpened():
            ret, frame = self.cap.read()
            if ret:
                data = frame.tobytes()  # tostring() is deprecated in newer NumPy
                buf = Gst.Buffer.new_allocate(None, len(data), None)
                buf.fill(0, data)
                buf.duration = self.duration
                timestamp = self.number_frames * self.duration
                buf.pts = buf.dts = int(timestamp)
                buf.offset = timestamp
                self.number_frames += 1
                retval = src.emit('push-buffer', buf)
                print('pushed buffer, frame {}, duration {} ns, duration {} s'.format(self.number_frames,
                                                                                      self.duration,
                                                                                      self.duration / Gst.SECOND))
                if retval != Gst.FlowReturn.OK:
                    print(retval)

    def do_create_element(self, url):
        return Gst.parse_launch(self.launch_string)

    def do_configure(self, rtsp_media):
        # called by the server each time the media is set up for streaming:
        # reset the frame counter and hook up the need-data callback
        self.number_frames = 0
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data)


class GstServer(GstRtspServer.RTSPServer):
    def __init__(self, **properties):
        super(GstServer, self).__init__(**properties)
        self.factory = SensorFactory()
        self.factory.set_shared(True)
        self.get_mount_points().add_factory("/test", self.factory)
        self.attach(None)


GObject.threads_init()  # a no-op on newer PyGObject; needed on older versions
Gst.init(None)

server = GstServer()

loop = GObject.MainLoop()
loop.run()
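
To check the stream, a receiving pipeline like the following should work; host and port here are assumptions (gst-rtsp-server listens on port 8554 by default, and it can be changed with set_service()):

# assuming the server runs on localhost with the default RTSP port 8554
gst-launch-1.0 -v rtspsrc location=rtsp://127.0.0.1:8554/test ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink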
Mascia answered 30/11, 2017 at 10:34

Comments:
No matter which way I went, the GStreamer caching used for shared media didn't work properly; it generates a lot of errors, so I recommend setting the "shared" property to false. Interestingly, the errors pile up when the processor is busier, yet they make no mention of needing more processing power. — Elfredaelfrida
I struggled with this solution for some time because I was using cv2.get(cv2.CAP_PROP_FPS). This function returns a float instead of an int; it didn't cause an error in the pipeline, but the client side failed to read the stream. I just want to add one point to the solution: if you use cv2.get() to fill in height, width, fps, etc., make sure to convert the values to int. It will then work as expected. — Deil
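
A minimal sketch of that conversion, assuming the properties are read from an opened cv2.VideoCapture:

import cv2

cap = cv2.VideoCapture(0)
# cap.get() returns floats; the caps string needs plain integers
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = int(cap.get(cv2.CAP_PROP_FPS)) or 30  # some cameras report 0.0, so fall back to 30
caps = 'video/x-raw,format=BGR,width={},height={},framerate={}/1'.format(width, height, fps)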
I am getting "Assertion fctx->async_lock failed at libavcodec/pthread_frame.c:155" when trying to use multiple clients. — Deil
It breaks if two VLC instances request the RTSP stream: number_frames resets to zero. — Tempest
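
One possible workaround, sketched below under the assumption that each connection should keep its own counter: key the frame count on the appsrc element instead of storing a single number_frames on the factory. frame_source is a hypothetical callable returning one raw BGR frame as bytes.

import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer


class MultiClientFactory(GstRtspServer.RTSPMediaFactory):
    # sketch: one frame counter per appsrc, so a newly connecting client
    # cannot reset the timestamps of a stream that is already running
    def __init__(self, launch_string, fps, frame_source, **properties):
        super(MultiClientFactory, self).__init__(**properties)
        self.launch_string = launch_string  # same appsrc pipeline as in the answer
        self.duration = Gst.SECOND // fps   # frame duration in nanoseconds
        self.frame_source = frame_source    # hypothetical: returns raw BGR bytes
        self.frame_counts = {}              # appsrc element -> its own counter

    def do_create_element(self, url):
        return Gst.parse_launch(self.launch_string)

    def do_configure(self, rtsp_media):
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        self.frame_counts[appsrc] = 0       # fresh counter for this stream only
        appsrc.connect('need-data', self.on_need_data)

    def on_need_data(self, src, length):
        data = self.frame_source()
        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        buf.duration = self.duration
        buf.pts = buf.dts = self.frame_counts[src] * self.duration
        self.frame_counts[src] += 1
        src.emit('push-buffer', buf)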
It seems you're overriding the method RTSPMediaFactory.do_configure(), but it doesn't seem to be documented anywhere. Do you know where I can find further information on this? — Turpin
@Deil I had the same problem, but only with newer versions of GStreamer; before that it was working fine. I don't know how to solve it. In my company we ended up buying a plugin from a consulting company, as it was cheaper for us to do that than to dedicate a programmer to it. — Mascia
@Turpin I'm afraid not; the signal is on the GStreamer website but it is not documented. Basically that method gets called before the media factory is instantiated by the server. link to docs — Mascia
@Elfredaelfrida Sorry for not replying earlier. It depends on the pipeline you are using; you can add a queue and drop buffers when the CPU is too busy, as in the sketch below. — Mascia
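
A hedged variant of the answer's launch string with a leaky queue placed before the encoder, so that stale frames are dropped under load instead of blocking the appsrc:

# same pipeline as the answer, plus a leaky queue that drops old buffers
# when the encoder cannot keep up
launch_string = 'appsrc name=source is-live=true block=true format=GST_FORMAT_TIME ' \
                'caps=video/x-raw,format=BGR,width=640,height=480,framerate=30/1 ' \
                '! queue leaky=downstream max-size-buffers=2 ' \
                '! videoconvert ! video/x-raw,format=I420 ' \
                '! x264enc speed-preset=ultrafast tune=zerolatency ' \
                '! rtph264pay config-interval=1 name=pay0 pt=96'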
@Mascia Thanks! — Turpin
What should I do to see the output? I tried 'gst-launch-1.0 appsrc emit-signals=True is-live=True caps=video/x-raw,format=RGB,width=640,height=480,framerate=30/1 ! queue max-size-buffers=4 ! videoconvert ! autovideosink' but it does not work; there is no output. Am I doing something wrong? Which command do you use to see the output? — Exaction
@Exaction gst-launch-1.0 -v rtspsrc location="rtsp://<host>:port/test" ! rtph264depay ! h264parse config-interval=1 ! avdec_h264 ! videoconvert ! autovideosink — Mascia
@Mascia What are the host and port values? They don't appear in the example; is there a default value for them? — Exaction
@Mascia The default port number is 8554. The Python output says that frames are being written, but I still can't see any window; text keeps printing but there is no image. What should I do? — Exaction
Please open another question on Stack Overflow and link it here, because I think you are the only one having this problem and more information is needed; other people might benefit from it too. — Mascia
Capturing the screen with mss is not working for me. #needhelp My on_need_data looks like this:

def on_need_data(self, src, lenght):
    monitor = {"top": 120, "left": 280, "width": 640, "height": 480}
    with mss.mss() as sct:
        while True:
            grab = sct.grab(monitor)
            img = np.array(grab)
            img = cv2.resize(img, (640, 480))
            # frame = img
            frame = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            data = frame.tostring()

— Knur
I'm sorry, I don't understand what you are trying to do; please create a new question and feel free to mention me in the comments. — Mascia
Did you guys get low latency using this code? I tried, but I have a delay of around 5 s from my webcam when I connect to the RTSP stream using VLC. — Stilbestrol
@GuilhermeCorrea VLC buffers 5 seconds by default; go to the settings and lower the buffer to 0 ms. — Mascia
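
The same setting is available on the command line (value in milliseconds; host is a placeholder as above, port 8554 per the earlier comment):

vlc --network-caching=0 rtsp://<host>:8554/test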
Thank you for your answer @Mascia. Reducing the buffer solved the delay issue, but now, when I do some processing on the image that takes around 0.5 s per frame, the delay goes up to 10 seconds, which doesn't make a lot of sense to me. Any ideas on how to solve this? — Stilbestrol
I suggest creating a new question, as this is out of the scope of this one; feel free to paste the link here. — Mascia
