Linux之Gstreamer实现视频OSD
Linux之Gstreamer实现视频OSD
背景
何为OSD?
OSD(on-screen display)即屏幕菜单式调节方式。一般是按Menu键后屏幕弹出的显示器各项调节项目信息的矩形菜单,可通过该菜单对显示器各项工作指标包括色彩、模式、几何形状等进行调整,从而达到最佳的使用状态。
—— 百度百科
简单点说就是在视频上叠加一个自定义的图层显示,主要包括字符,图片,图形等。
Gstreamer是啥?
GStreamer is a library for constructing graphs of media-handling components. The applications it supports range from simple Ogg/Vorbis playback, audio/video streaming to complex audio (mixing) and video (non-linear editing) processing.
Applications can take advantage of advances in codec and filter technology transparently. Developers can add new codecs and filters by writing a simple plugin with a clean, generic interface. Read more …
GStreamer is released under the LGPL. The 1.x series is API and ABI stable and supersedes the previous stable 0.10 series. Both can be installed in parallel.
来自Gstreamer官网:https://gstreamer.freedesktop.org/
简单点说就是一套包含有很多音视频处理组件的多媒体库。
Gstreamer实现视频OSD
上一篇,说了下在小算力的IPC芯片上一种通用的OSD实现方式:
https://notes.z-dd.online/2024/05/25/%E7%BA%A2%E5%A4%96%E7%83%AD%E6%88%90%E5%83%8F%E4%B8%8B%E7%AF%87–%E5%8F%A0%E5%8A%A0%E5%AD%97%E7%AC%A6OSD/
今天说说在通用Soc或CPU上实现通用视频OSD的方案,这里主要使用Gstreamer的插件实现,OpenCV的方案以后有机会再研究。
字符OSD主要是基于pango
实现,包括以下插件:
- clockoverlay: 在视频流上叠加当前时钟时间
- textoverlay:在视频缓冲区上层叠加文本字符串
- textrender:将文本字符串渲染到图像位图
- timeoverlay:在视频流上叠加缓冲区时间戳
图片OSD主要是基于gdkpixbuf
实现,主要使用以下插件:
- gdkpixbufoverlay:将图像叠加到视频流上
实例
下面的实例在RK3566和X86的Linux系统都已验证通过,主要测试了textoverlay
、timeoverlay
和gdkpixbufoverlay
。
命令行实现
#查看插件用法
gst-inspect-1.0 textoverlay
#叠字符
gst-launch-1.0 videotestsrc ! video/x-raw,width=640,height=480 ! textoverlay text="东西 textoverlay" ! videoconvert ! autovideosink
#叠时间和字符
gst-launch-1.0 videotestsrc ! video/x-raw,width=640,height=480 ! timeoverlay halignment=left valignment=bottom text="Stream time:" shaded-background=false ! xvimagesink
#叠图片
gst-launch-1.0 videotestsrc ! video/x-raw,width=640,height=480 ! textoverlay text="GoodBye" ! gdkpixbufoverlay location=logo.png overlay-height=100 overlay-width=100 alpha=0.8 ! videoconvert ! autovideosink
C实现
一个简单的C语言实现的例子,可以根据自己的需求进行扩展和修改。
//gcc gst-textoverlay.c -o gst-textoverlay -pthread `pkg-config --cflags --libs gstreamer-1.0`
#include <gst/gst.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
/* Shared state passed from main() to the bus-message callback. */
typedef struct _CustomData {
gboolean is_live;      /* TRUE when the pipeline reported NO_PREROLL (live source): buffering messages are ignored */
GstElement *pipeline;  /* the top-level pipeline, used for state changes from the callback */
GMainLoop *loop;       /* main loop to quit on ERROR/EOS */
} CustomData;
/*
 * Worker-thread entry point: every 2 seconds updates the "text" property of
 * the overlay element passed in `arg`, cycling the label "text=>0" through
 * "text=>10", then exits.
 *
 * `arg` must be a GstElement* (the overlay found by name in the pipeline).
 * The function takes its own reference so the element stays alive even if
 * the caller drops theirs while the thread is running.
 *
 * Returns NULL (pthread convention; no result is produced).
 */
static void * text_in(void * arg) {
GstElement* textoverlay = (GstElement*)arg;
gst_object_ref(textoverlay);
gchar str[100];
for (int i = 0; i <= 10; i++) {
sleep(2);
/* g_snprintf is bounded, unlike sprintf, so the buffer can never overflow */
g_snprintf(str, sizeof(str), "text=>%d", i);
g_print("text => %d\n", i);
g_object_set(GST_ELEMENT(textoverlay), "text", str, NULL);
}
gst_object_unref(textoverlay);
return NULL;
}
/*
 * Bus "message" signal handler.
 *
 * Reacts to the messages the demo cares about:
 *  - ERROR / EOS:       drop the pipeline back to READY and quit the main loop
 *  - BUFFERING:         pause below 100%, resume at 100% (skipped for live sources)
 *  - CLOCK_LOST:        bounce PAUSED->PLAYING so the pipeline picks a new clock
 * Everything else is ignored.
 */
static void cb_message (GstBus *bus, GstMessage *msg, CustomData *data) {
GstMessageType type = GST_MESSAGE_TYPE (msg);

if (type == GST_MESSAGE_ERROR) {
GError *err = NULL;
gchar *dbg_info = NULL;
gst_message_parse_error (msg, &err, &dbg_info);
g_print ("Error: %s\n", err->message);
g_error_free (err);
g_free (dbg_info);
gst_element_set_state (data->pipeline, GST_STATE_READY);
g_main_loop_quit (data->loop);
} else if (type == GST_MESSAGE_EOS) {
/* end-of-stream */
gst_element_set_state (data->pipeline, GST_STATE_READY);
g_main_loop_quit (data->loop);
} else if (type == GST_MESSAGE_BUFFERING) {
/* If the stream is live, we do not care about buffering. */
if (!data->is_live) {
gint percent = 0;
gst_message_parse_buffering (msg, &percent);
g_print ("Buffering (%3d%%)\r", percent);
/* Wait until buffering is complete before start/resume playing */
gst_element_set_state (data->pipeline,
percent < 100 ? GST_STATE_PAUSED : GST_STATE_PLAYING);
}
} else if (type == GST_MESSAGE_CLOCK_LOST) {
/* Get a new clock */
gst_element_set_state (data->pipeline, GST_STATE_PAUSED);
gst_element_set_state (data->pipeline, GST_STATE_PLAYING);
}
/* all other message types: ignore */
}
int main(int argc, char *argv[]) {
GstElement *pipeline;
GstBus *bus;
GstStateChangeReturn ret;
GMainLoop *main_loop;
CustomData data;
GstElement *textoverlay;
pthread_t thread_id;
/* Initialize GStreamer */
gst_init (&argc, &argv);
gchar *text_content;
/* Initialize our data structure */
memset (&data, 0, sizeof (data));
/* Build the pipeline */
pipeline = gst_parse_launch ("videotestsrc ! video/x-raw,width=640,height=480 ! timeoverlay halignment=left valignment=bottom name=gst_text shaded-background=false ! gdkpixbufoverlay location=logo.jpg overlay-height=150 overlay-width=150 alpha=0.6 ! xvimagesink", NULL);
bus = gst_element_get_bus (pipeline);
textoverlay = gst_bin_get_by_name(GST_BIN(pipeline), "gst_text");
g_print("src-location: %s sink-location: %s", argv[1], argv[2]);
g_object_set(GST_ELEMENT(textoverlay), "text", "starting...", NULL);
g_object_get(GST_ELEMENT(textoverlay), "text", &text_content, NULL);
int err = pthread_create(&thread_id, NULL, &text_in, textoverlay);
if (err) {
g_print("text fun create failed\n");
goto err_tag;
} else {
g_print("text fun create ok!");
}
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
} else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
data.is_live = TRUE;
}
main_loop = g_main_loop_new (NULL, FALSE);
data.loop = main_loop;
data.pipeline = pipeline;
gst_bus_add_signal_watch (bus);
g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);
g_main_loop_run (main_loop);
err_tag:
/* Free resources */
g_main_loop_unref (main_loop);
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
return 0;
}
效果
具体效果可看我的视频号: