From patchwork Sun Mar 19 11:30:13 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Patchwork-Submitter: Tomi Valkeinen X-Patchwork-Id: 18421 Return-Path: X-Original-To: parsemail@patchwork.libcamera.org Delivered-To: parsemail@patchwork.libcamera.org Received: from lancelot.ideasonboard.com (lancelot.ideasonboard.com [92.243.16.209]) by patchwork.libcamera.org (Postfix) with ESMTPS id C648CC0F1B for ; Sun, 19 Mar 2023 11:30:32 +0000 (UTC) Received: from lancelot.ideasonboard.com (localhost [IPv6:::1]) by lancelot.ideasonboard.com (Postfix) with ESMTP id 0E0E6626A2; Sun, 19 Mar 2023 12:30:32 +0100 (CET) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/simple; d=libcamera.org; s=mail; t=1679225432; bh=qkPNjrrpqm8InO+jYbEiFWUEtkW9qJnBN9/yRJ33YtE=; h=To:Date:Subject:List-Id:List-Unsubscribe:List-Archive:List-Post: List-Help:List-Subscribe:From:Reply-To:From; b=zkVAspKpYGNEKoIuuUMrbzREg5Vam+mvjY5u1aBc1KwX1+yifRJ240GVRq+kYfe6k RvsrmwgQBBGZfhemmry3DGKUpnQNKK4WNFMB3giPEZWZQq2P7PP3ciu6Fqi3QMLARu mdAbQyvGb8/+GKznqyOVzgdQFIICc/JmsFrrERXs93BX5B8eiDKicyVB9T0yC8qJFl wN939TeS2rre75ORSE8pSU6amUJqmufLd/nK+Pl1YtMnzyIWPi7rTMZEVM/yd2k9ns UEZhLkIvb+pfxq15Hf+PILvQpmlWJXumf8U+0tjY7UQC6et1EKVTHLkZIGMagqW43j I7N2BkErrGSMA== Received: from perceval.ideasonboard.com (perceval.ideasonboard.com [213.167.242.64]) by lancelot.ideasonboard.com (Postfix) with ESMTPS id 353DB603AC for ; Sun, 19 Mar 2023 12:30:30 +0100 (CET) Authentication-Results: lancelot.ideasonboard.com; dkim=pass (1024-bit key; unprotected) header.d=ideasonboard.com header.i=@ideasonboard.com header.b="hBCgueOK"; dkim-atps=neutral Received: from desky.lan (91-154-32-225.elisa-laajakaista.fi [91.154.32.225]) by perceval.ideasonboard.com (Postfix) with ESMTPSA id 8D4861858; Sun, 19 Mar 2023 12:30:29 +0100 (CET) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/simple; d=ideasonboard.com; s=mail; t=1679225429; bh=qkPNjrrpqm8InO+jYbEiFWUEtkW9qJnBN9/yRJ33YtE=; h=From:To:Cc:Subject:Date:From; 
b=hBCgueOKuFczlk0ADu1YX0FRqgAbQCBeTknK+iDVAxLV80hCVI+Cb7oDXre2syo2x yHol1adVq/9YteNrcNjrXeGcP3Fo6/ekrdjfA5zD18kJ7usQ46Ip59JIPLCwUhg4Qr Jz1n6aU4mxjeFNfZk8Wg8WxaiwGD/A+pGirCvWwU= To: libcamera-devel@lists.libcamera.org Date: Sun, 19 Mar 2023 13:30:13 +0200 Message-Id: <20230319113013.25046-1-tomi.valkeinen@ideasonboard.com> X-Mailer: git-send-email 2.34.1 MIME-Version: 1.0 Subject: [libcamera-devel] [PATCH] py: cam: Network renderer X-BeenThere: libcamera-devel@lists.libcamera.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , X-Patchwork-Original-From: Tomi Valkeinen via libcamera-devel From: Tomi Valkeinen Reply-To: Tomi Valkeinen Errors-To: libcamera-devel-bounces@lists.libcamera.org Sender: "libcamera-devel" Here's something I have found useful a few times. This adds a "tx" renderer to cam.py, which sends the frames over the network to a receiver. It also adds a "cam-rx" tool (non-libcamera based) which receives the frames and uses PyQt to show them on the screen, usually run on a PC. This is obviously not super efficient, but on the PC side it doesn't matter. On the TX side, at least the RPi4 seemed to work without noticeable lag, but on my old 32-bit TI DRA76, when sending three camera streams, the performance dropped to ~5fps. Still, I find that more than enough for most development work. This could be extended to also transmit the metadata. 
Signed-off-by: Tomi Valkeinen --- src/py/cam/cam.py | 4 + src/py/cam/cam_tx.py | 94 +++++++++++++ src/py/examples/cam-rx.py | 155 +++++++++++++++++++++ src/py/examples/cam_rx_helpers.py | 223 ++++++++++++++++++++++++++++++ 4 files changed, 476 insertions(+) create mode 100644 src/py/cam/cam_tx.py create mode 100755 src/py/examples/cam-rx.py create mode 100644 src/py/examples/cam_rx_helpers.py diff --git a/src/py/cam/cam.py b/src/py/cam/cam.py index 967a72f5..50f0f8d6 100755 --- a/src/py/cam/cam.py +++ b/src/py/cam/cam.py @@ -387,6 +387,7 @@ def main(): parser.add_argument('--list-controls', action='store_true', help='List cameras controls') parser.add_argument('-I', '--info', action='store_true', help='Display information about stream(s)') parser.add_argument('-R', '--renderer', default='null', help='Renderer (null, kms, qt, qtgl)') + parser.add_argument('--rargs', default='', help='Arguments passed to the renderer (pass --help to see help)') # per camera options parser.add_argument('-C', '--capture', nargs='?', type=int, const=1000000, action=CustomAction, help='Capture until interrupted by user or until CAPTURE frames captured') @@ -449,6 +450,9 @@ def main(): elif args.renderer == 'qtgl': import cam_qtgl renderer = cam_qtgl.QtRenderer(state) + elif args.renderer == 'tx': + import cam_tx + renderer = cam_tx.TxRenderer(state, args.rargs) else: print('Bad renderer', args.renderer) return -1 diff --git a/src/py/cam/cam_tx.py b/src/py/cam/cam_tx.py new file mode 100644 index 00000000..3d31c6ef --- /dev/null +++ b/src/py/cam/cam_tx.py @@ -0,0 +1,94 @@ +# SPDX-License-Identifier: GPL-2.0-or-later +# Copyright (C) 2023, Tomi Valkeinen + +import argparse +import libcamera +import libcamera.utils +import selectors +import socket +import struct +import sys + +PORT = 43242 + +# ctx-idx, width, height, format, num-planes, plane1, plane2, plane3, plane4 +struct_fmt = struct.Struct(' + +from cam_rx_helpers import data_to_pix +from PyQt5 import QtCore, QtWidgets +from 
PyQt5.QtCore import Qt +import PyQt5.QtNetwork +import struct +import sys +import traceback + +PORT = 43242 +receivers = [] + +struct_fmt = struct.Struct(' +# +# Debayering code based on PiCamera documentation + +from numpy.lib.stride_tricks import as_strided +from PyQt5 import QtGui +import numpy as np + + +def demosaic(data, r0, g0, g1, b0): + # Separate the components from the Bayer data to RGB planes + + rgb = np.zeros(data.shape + (3,), dtype=data.dtype) + rgb[1::2, 0::2, 0] = data[r0[1]::2, r0[0]::2] # Red + rgb[0::2, 0::2, 1] = data[g0[1]::2, g0[0]::2] # Green + rgb[1::2, 1::2, 1] = data[g1[1]::2, g1[0]::2] # Green + rgb[0::2, 1::2, 2] = data[b0[1]::2, b0[0]::2] # Blue + + # Below we present a fairly naive de-mosaic method that simply + # calculates the weighted average of a pixel based on the pixels + # surrounding it. The weighting is provided by a byte representation of + # the Bayer filter which we construct first: + + bayer = np.zeros(rgb.shape, dtype=np.uint8) + bayer[1::2, 0::2, 0] = 1 # Red + bayer[0::2, 0::2, 1] = 1 # Green + bayer[1::2, 1::2, 1] = 1 # Green + bayer[0::2, 1::2, 2] = 1 # Blue + + # Allocate an array to hold our output with the same shape as the input + # data. After this we define the size of window that will be used to + # calculate each weighted average (3x3). Then we pad out the rgb and + # bayer arrays, adding blank pixels at their edges to compensate for the + # size of the window when calculating averages for edge pixels. + + output = np.empty(rgb.shape, dtype=rgb.dtype) + window = (3, 3) + borders = (window[0] - 1, window[1] - 1) + border = (borders[0] // 2, borders[1] // 2) + + rgb = np.pad(rgb, [ + (border[0], border[0]), + (border[1], border[1]), + (0, 0), + ], 'constant') + bayer = np.pad(bayer, [ + (border[0], border[0]), + (border[1], border[1]), + (0, 0), + ], 'constant') + + # For each plane in the RGB data, we use a nifty numpy trick + # (as_strided) to construct a view over the plane of 3x3 matrices. 
We do + # the same for the bayer array, then use Einstein summation on each + # (np.sum is simpler, but copies the data so it's slower), and divide + # the results to get our weighted average: + + for plane in range(3): + p = rgb[..., plane] + b = bayer[..., plane] + + pview = as_strided(p, shape=( + p.shape[0] - borders[0], + p.shape[1] - borders[1]) + window, strides=p.strides * 2) + bview = as_strided(b, shape=( + b.shape[0] - borders[0], + b.shape[1] - borders[1]) + window, strides=b.strides * 2) + psum = np.einsum('ijkl->ij', pview) + bsum = np.einsum('ijkl->ij', bview) + output[..., plane] = psum // bsum + + return output + + +def convert_raw(data, w, h, fmt): + bayer_pattern = fmt[1:5] + bitspp = int(fmt[5:]) + + if bitspp == 8: + data = data.reshape((h, w)) + data = data.astype(np.uint16) + elif bitspp in [10, 12]: + data = data.view(np.uint16) + data = data.reshape((h, w)) + else: + raise Exception('Bad bitspp:' + str(bitspp)) + + idx = bayer_pattern.find('R') + assert(idx != -1) + r0 = (idx % 2, idx // 2) + + idx = bayer_pattern.find('G') + assert(idx != -1) + g0 = (idx % 2, idx // 2) + + idx = bayer_pattern.find('G', idx + 1) + assert(idx != -1) + g1 = (idx % 2, idx // 2) + + idx = bayer_pattern.find('B') + assert(idx != -1) + b0 = (idx % 2, idx // 2) + + rgb = demosaic(data, r0, g0, g1, b0) + rgb = (rgb >> (bitspp - 8)).astype(np.uint8) + + return rgb + + +def convert_yuv444_to_rgb(yuv): + m = np.array([ + [1.0, 1.0, 1.0], + [-0.000007154783816076815, -0.3441331386566162, 1.7720025777816772], + [1.4019975662231445, -0.7141380310058594, 0.00001542569043522235] + ]) + + rgb = np.dot(yuv, m) + rgb[:, :, 0] -= 179.45477266423404 + rgb[:, :, 1] += 135.45870971679688 + rgb[:, :, 2] -= 226.8183044444304 + rgb = rgb.astype(np.uint8) + + return rgb + + +def convert_yuyv(data, w, h): + # YUV422 + yuyv = data.reshape((h, w // 2 * 4)) + + # YUV444 + yuv = np.empty((h, w, 3), dtype=np.uint8) + yuv[:, :, 0] = yuyv[:, 0::2] # Y + yuv[:, :, 1] = yuyv[:, 1::4].repeat(2, 
axis=1) # U + yuv[:, :, 2] = yuyv[:, 3::4].repeat(2, axis=1) # V + + return convert_yuv444_to_rgb(yuv) + + +def convert_uyvy(data, w, h): + # YUV422 + yuyv = data.reshape((h, w // 2 * 4)) + + # YUV444 + yuv = np.empty((h, w, 3), dtype=np.uint8) + yuv[:, :, 0] = yuyv[:, 1::2] # Y + yuv[:, :, 1] = yuyv[:, 0::4].repeat(2, axis=1) # U + yuv[:, :, 2] = yuyv[:, 2::4].repeat(2, axis=1) # V + + return convert_yuv444_to_rgb(yuv) + + +def convert_nv12(data, w, h): + plane1 = data[:w * h] + plane2 = data[w * h:] + + y = plane1.reshape((h, w)) + uv = plane2.reshape((h // 2, w // 2, 2)) + + # YUV444 + yuv = np.empty((h, w, 3), dtype=np.uint8) + yuv[:, :, 0] = y[:, :] # Y + yuv[:, :, 1] = uv[:, :, 0].repeat(2, axis=0).repeat(2, axis=1) # U + yuv[:, :, 2] = uv[:, :, 1].repeat(2, axis=0).repeat(2, axis=1) # V + + return convert_yuv444_to_rgb(yuv) + + +def to_rgb(fmt, w, h, data): + if fmt == 'YUYV': + return convert_yuyv(data, w, h) + + if fmt == 'UYVY': + return convert_uyvy(data, w, h) + + elif fmt == 'NV12': + return convert_nv12(data, w, h) + + elif fmt == 'RGB888': + rgb = data.reshape((h, w, 3)) + rgb[:, :, [0, 1, 2]] = rgb[:, :, [2, 1, 0]] + + elif fmt == 'BGR888': + rgb = data.reshape((h, w, 3)) + + elif fmt in ['ARGB8888', 'XRGB8888']: + rgb = data.reshape((h, w, 4)) + rgb = np.flip(rgb, axis=2) + # drop alpha component + rgb = np.delete(rgb, np.s_[0::4], axis=2) + + elif fmt.startswith('S'): + return convert_raw(data, w, h, fmt) + + else: + raise Exception('Unsupported format ' + fmt) + + return rgb + + +def data_to_rgb(fmt, w, h, data): + data = np.frombuffer(data, dtype=np.uint8) + rgb = to_rgb(fmt, w, h, data) + return rgb + + +def rgb_to_pix(rgb): + w = rgb.shape[1] + h = rgb.shape[0] + qim = QtGui.QImage(rgb, w, h, QtGui.QImage.Format.Format_RGB888) + pix = QtGui.QPixmap.fromImage(qim) + return pix + + +def data_to_pix(fmt, w, h, data): + if fmt == 'MJPEG': + pix = QtGui.QPixmap(w, h) + pix.loadFromData(data) + else: + rgb = data_to_rgb(fmt, w, h, data) + pix = 
rgb_to_pix(rgb) + + return pix