
To process a video's frames one by one, OpenCV is usually enough. The code can look like this:

#!/usr/bin/env python
# _*_ coding: utf-8 _*_
#
#
# Copyright (C) 2020 Wei Keting<weikting@gmail.com>. All rights reserved.
# @Time : 2020-06-26 11:51 
# @File : test_opencv.py
# @Description :
#
#

import sys
import cv2
import numpy as np
import os

f = sys.argv[1]
cap = cv2.VideoCapture(sys.argv[1])
fps = cap.get(cv2.CAP_PROP_FPS)
# create a window named `image`
cv2.imshow("image", np.random.randint(0, 255, (64, 64, 3), dtype='uint8'))
# set window's title
cv2.setWindowTitle("image", os.path.basename(f))
delay = int(1000 / fps) if fps > 0 else 33  # fall back to ~30 fps if the FPS is unknown
while True:
    ret, frame = cap.read()
    if not ret:
        break
    cv2.imshow("image", frame)
    # pace playback to roughly the source frame rate
    cv2.waitKey(delay)
cap.release()

Run:
       python3 test_opencv.py path_of_the_video.webm

However, OpenCV will not play the audio. When you need sound playing while you process the frames, consider using GStreamer instead.
Python 3 code:

#!/usr/bin/env python3
# _*_ coding: utf-8 _*_
#
#
# Copyright (C) 2020 Wei Keting<weikting@gmail.com>. All rights reserved.
# @Time : 2020-06-26 10:26
# @File : test_gst.py
# @Description :
#
#

import os
import sys

import gi

gi.require_version('Gst', '1.0')
gi.require_version("GstVideo", "1.0")
from gi.repository import Gst, GLib, GstVideo
import numpy as np
import cv2


# noinspection PyMethodMayBeStatic
class GstDemo:

    def __init__(self):
        self.frame_idx = 0
        self.player = Gst.ElementFactory.make("playbin", "player")
        appsink = Gst.ElementFactory.make("appsink", "sink")
        appsink.set_property("emit-signals", True)  # 发送new-sample信号

        caps = Gst.Caps.new_empty_simple("video/x-raw")
        caps.set_value('format', "BGRA")  # plain BGR has some issues here, so BGRA is used instead
        appsink.set_property("caps", caps)  # make appsink negotiate BGRA frames

        appsink.connect("new-sample", self._on_new_sample)

        self.player.set_property("video-sink", appsink)
        bus = self.player.get_bus()
        bus.add_signal_watch()
        bus.connect("message", self.on_message)
        self.loop = GLib.MainLoop()

    def _on_new_sample(self, sink):
        """
        Pull one sample from the appsink and process the video frame.
        :param sink: the appsink element
        :return: a Gst.FlowReturn value
        """
        self.frame_idx += 1
        sample = sink.emit('pull-sample')
        if sample is None:
            return Gst.FlowReturn.EOS
        caps = sample.get_caps()
        buffer = sample.get_buffer()
        structure = caps.get_structure(0)
        _, width = structure.get_int("width")
        _, height = structure.get_int("height")
        success, map_info = buffer.map(Gst.MapFlags.READ)
        if not success:
            return Gst.FlowReturn.ERROR
        if self.frame_idx == 1:
            print("caps: ", caps.to_string(), "size:", map_info.size)
        img = np.ndarray(
            shape=(height, width, 4),  # BGRA has 4 channels
            dtype=np.uint8,
            buffer=map_info.data)
        # drop the alpha channel and copy, so the data stays valid after unmapping
        img = img[:, :, :3].copy()
        buffer.unmap(map_info)

        # call cv2.imshow on the main thread, otherwise it crashes
        GLib.idle_add(self._idle_show_image, img)
        os.makedirs("/tmp/cv", exist_ok=True)
        cv2.imwrite("/tmp/cv/{:06d}.jpg".format(self.frame_idx), img)
        return Gst.FlowReturn.OK

    def _idle_show_image(self, image):
        cv2.imshow("test", image)
        return GLib.SOURCE_REMOVE

    def on_message(self, bus, message):
        t = message.type
        if t == Gst.MessageType.EOS:
            self.player.set_state(Gst.State.NULL)
            self.loop.quit()
        elif t == Gst.MessageType.ERROR:
            self.player.set_state(Gst.State.NULL)
            err, debug = message.parse_error()
            print("Error: %s" % err, debug)
            self.loop.quit()

    def start(self):
        for filepath in sys.argv[1:]:
            if os.path.isfile(filepath):
                filepath = os.path.abspath(filepath)
                self.player.set_property("uri", "file://" + filepath)
                self.player.set_state(Gst.State.PLAYING)
                break


Gst.init(None)
demo = GstDemo()
demo.start()
demo.loop.run()

Run the test:

       python3 test_gst.py path_of_the_video.webm

C code:

/**
*
* Copyright (C) 2020 Wei Keting<weikting@gmail.com>. All rights reserved.
* @Time : 2020-06-25 09:22
* @File : test_gst1.c
* @Description :
*
**/

#include <gst/gst.h>
#include <gst/app/app.h>
#include <gst/video/video.h>
#include <cairo.h>
#include <stdio.h>
#include <glib/gstdio.h>

#include <gtk/gtk.h>

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define FORMAT "BGRx" /* BGRA */
#else
#define FORMAT "xRGB" /* ARGB */
#endif

static GstFlowReturn
on_new_sample (GstAppSink *appsink, GtkWidget *image)
{
    static gint frame_idx = 0;

    GstSample *sample = gst_app_sink_pull_sample(appsink);
    if(sample == NULL){
        if(gst_app_sink_is_eos(appsink))
            return GST_FLOW_EOS;
        return GST_FLOW_ERROR;
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstCaps *caps = gst_sample_get_caps(sample);
    GstMapInfo map_info;
    gint width,height;

    gst_buffer_map(buffer,&map_info,GST_MAP_READ);
    GstStructure *structure = gst_caps_get_structure(caps,0);
    gst_structure_get_int(structure,"width",&width);
    gst_structure_get_int(structure,"height",&height);

    frame_idx += 1;
    cairo_format_t format;
    cairo_surface_t *surface;
    format = CAIRO_FORMAT_ARGB32;
    surface = cairo_image_surface_create_for_data (map_info.data,
        format, width, height, cairo_format_stride_for_width(format,width));
    gtk_image_set_from_surface(GTK_IMAGE(image),surface);

    char filename[128] = {0};
    g_mkdir_with_parents("/tmp/pictures",0700);
    snprintf(filename,sizeof(filename),"/tmp/pictures/%06d.png",frame_idx);
    cairo_status_t st = cairo_surface_write_to_png(surface,filename);
    if(st != CAIRO_STATUS_SUCCESS){
        g_printerr("st:%s\n",cairo_status_to_string(st));
    }

    cairo_surface_destroy(surface);
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

static void
on_bus_message (GstBus    *bus,
               GstMessage *message,
               GstElement *pipeline)
{
    switch(GST_MESSAGE_TYPE(message)){
    case GST_MESSAGE_EOS:
    case GST_MESSAGE_ERROR:
        gst_element_set_state(pipeline,GST_STATE_NULL);
        gtk_main_quit();
        break;
    default:
        break;
    }
}

static void
on_win_destroy(GtkWidget*win,GstElement *pipeline)
{
    gst_element_set_state(pipeline,GST_STATE_NULL);
    gtk_main_quit();
}

int main(int argc, char *argv[]) {
    GstElement *pipeline,*appsink;
    GstCaps *caps;
    GstBus *bus;
    GtkWidget *win,*image;

    gtk_init(&argc,&argv);
    gst_init (&argc, &argv);

    win = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    image = gtk_image_new();
    gtk_container_add(GTK_CONTAINER(win),image);
    gtk_widget_show_all(win);
    gtk_window_present(GTK_WINDOW(win));

    /* argv[1] must be an absolute path to form a valid file:// URI */
    gchar *args = g_strdup_printf("playbin uri=file://%s",argv[1]);
    pipeline = gst_parse_launch (args, NULL);
    g_free(args);

    appsink = gst_element_factory_make("appsink","sink");
    caps = gst_caps_new_simple("video/x-raw","format", G_TYPE_STRING, FORMAT, NULL);
    g_object_set(appsink,"emit-signals",TRUE,NULL);
    g_object_set(appsink,"caps",caps,NULL);
    g_signal_connect(appsink,"new-sample",G_CALLBACK(on_new_sample),image);

    g_object_set(pipeline,"video-sink",appsink,NULL);

    bus = gst_element_get_bus (pipeline);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus,"message",G_CALLBACK(on_bus_message),pipeline);

    g_signal_connect(win,"destroy",G_CALLBACK(on_win_destroy),pipeline);
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    gtk_main();
    gst_object_unref (bus);
    gst_object_unref (pipeline);
    return 0;
}

Run the test:

       gcc test_gst1.c -o test_gst1 $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0 gstreamer-video-1.0 cairo gtk+-3.0)
       ./test_gst1 path_of_the_video.mp4
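
One caveat worth noting: just like the Python version (which uses GLib.idle_add because cv2.imshow must run on the main thread), the new-sample callback here runs on a GStreamer streaming thread, and the surface it hands to gtk_image_set_from_surface() still borrows map_info.data. A rough sketch of copying the frame and deferring the widget update to the GTK main loop with g_idle_add() is shown below; UpdateData, update_image_idle and queue_image_update are illustrative names invented for this sketch, not part of the code above.

/* Illustrative helper for pushing a frame to the GTK main thread.
 * After queue_image_update() returns, on_new_sample() can safely
 * gst_buffer_unmap() the buffer and unref the sample. */
typedef struct {
    GtkWidget       *image;    /* the GtkImage created in main() */
    cairo_surface_t *surface;  /* standalone copy that owns its pixels */
} UpdateData;

static gboolean
update_image_idle (gpointer user_data)
{
    UpdateData *d = user_data;
    /* now on the GTK main thread, so touching the widget is safe */
    gtk_image_set_from_surface (GTK_IMAGE (d->image), d->surface);
    cairo_surface_destroy (d->surface);
    g_free (d);
    return G_SOURCE_REMOVE;
}

/* call this from on_new_sample() instead of gtk_image_set_from_surface() */
static void
queue_image_update (GtkWidget *image, cairo_surface_t *frame_surface,
                    cairo_format_t format, int width, int height)
{
    /* copy the pixels: the mapped buffer data should not be assumed
     * valid once the callback returns */
    cairo_surface_t *copy = cairo_image_surface_create (format, width, height);
    cairo_t *cr = cairo_create (copy);
    cairo_set_source_surface (cr, frame_surface, 0, 0);
    cairo_paint (cr);
    cairo_destroy (cr);

    UpdateData *d = g_new0 (UpdateData, 1);
    d->image = image;
    d->surface = copy;
    g_idle_add (update_image_idle, d);
}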

Rounding image corners and circle-cropping images with Gtk

Code:

#!/usr/bin/env python3
# _*_ coding: utf-8 _*_
#
#
# @File : test_round_image.py
# @Time : 2021-12-10 22:02 
# Copyright (C) 2021 WeiKeting<weikting@gmail.com>. All rights reserved.
# @Description :
#
#
import math

import gi

gi.require_version('GdkPixbuf', '2.0')
gi.require_version('Gtk', '3.0')
from gi.repository import GdkPixbuf, Gtk, Gdk


def rounded_image(fn, size=None, corner_radius=0):
    import cairo
    if size is not None and size[0] > 0 and size[1] > 0:
        piu = GdkPixbuf.Pixbuf.new_from_file_at_scale(fn, size[0], size[1], True)
    else:
        piu = GdkPixbuf.Pixbuf.new_from_file(fn)
    w = piu.get_width()
    h = piu.get_height()
    if corner_radius > 0:
        # rounded corners
        r = min(corner_radius, w / 2., h / 2.)
        surface = cairo.ImageSurface(cairo.Format.ARGB32, w, h)
        ctx = cairo.Context(surface)

        Gdk.cairo_set_source_pixbuf(ctx, piu, 0, 0)

        # top-left corner arc
        ctx.arc(r, r, r, -math.pi, -math.pi / 2.)
        # top line
        ctx.line_to(w - r, 0)

        # top-right corner arc
        ctx.arc(w - r, r, r, -math.pi / 2., 0)
        # right line
        ctx.line_to(w, h - r)

        # bottom-right corner arc
        ctx.arc(w - r, h - r, r, 0, math.pi / 2.)
        # bottom line
        ctx.line_to(r, h)

        # bottom-left corner arc
        ctx.arc(r, h - r, r, math.pi / 2., math.pi)
        # close the path back to its start point, which forms the left line
        ctx.close_path()

        # clip to the current path (the drawable area)
        ctx.clip()

        ctx.paint()
        piu = Gdk.pixbuf_get_from_surface(surface, 0, 0, w, h)
    else:
        # circle crop
        sw, sh = (min(w, h),) * 2
        surface = cairo.ImageSurface(cairo.Format.ARGB32, sw, sh)
        ctx = cairo.Context(surface)
        # center the original pixbuf in the square surface
        Gdk.cairo_set_source_pixbuf(ctx, piu, -(w - sw) / 2.0, -(h - sh) / 2.0)
        # circular path
        ctx.arc(sw / 2.0, sh / 2.0, sh / 2.0, 0, 2 * math.pi)
        # clip to the circular path (the drawable area)
        ctx.clip()
        # paint the source pixbuf into the clipped region
        ctx.paint()
        piu = Gdk.pixbuf_get_from_surface(surface, 0, 0, sw, sh)
    return piu


def main():
    import sys
    f = sys.argv[1]
    pb = rounded_image(f, size=None, corner_radius=0)
    win = Gtk.Window()
    image = Gtk.Image()
    image.set_from_pixbuf(pb)
    win.add(image)
    win.set_default_size(300, 400)
    win.show_all()
    win.present()
    win.connect("delete-event", Gtk.main_quit)
    Gtk.main()


if __name__ == '__main__':
    main()

Although the code is written in Python, a quick look should be enough to write a C version.
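
For example, a rough C sketch of the circle-crop branch (untested, written against GTK 3 / gdk-pixbuf / cairo; circle_crop_pixbuf is just an illustrative name) might look like this:

#include <gtk/gtk.h>

static GdkPixbuf *
circle_crop_pixbuf (GdkPixbuf *src)
{
    int w = gdk_pixbuf_get_width (src);
    int h = gdk_pixbuf_get_height (src);
    int s = MIN (w, h);                 /* side of the square result */

    cairo_surface_t *surface =
        cairo_image_surface_create (CAIRO_FORMAT_ARGB32, s, s);
    cairo_t *cr = cairo_create (surface);

    /* circular clip path, then paint the centered pixbuf into it */
    cairo_arc (cr, s / 2.0, s / 2.0, s / 2.0, 0, 2 * G_PI);
    cairo_clip (cr);
    gdk_cairo_set_source_pixbuf (cr, src, -(w - s) / 2.0, -(h - s) / 2.0);
    cairo_paint (cr);

    GdkPixbuf *result = gdk_pixbuf_get_from_surface (surface, 0, 0, s, s);

    cairo_destroy (cr);
    cairo_surface_destroy (surface);
    return result;
}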

Test environment: Ubuntu 21.10
That said, any environment with Python 3 and Gtk 3 should be able to run it.