Show a patch.

GET /api/patches/18421/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 18421,
    "url": "https://patchwork.libcamera.org/api/patches/18421/?format=api",
    "web_url": "https://patchwork.libcamera.org/patch/18421/",
    "project": {
        "id": 1,
        "url": "https://patchwork.libcamera.org/api/projects/1/?format=api",
        "name": "libcamera",
        "link_name": "libcamera",
        "list_id": "libcamera_core",
        "list_email": "libcamera-devel@lists.libcamera.org",
        "web_url": "",
        "scm_url": "",
        "webscm_url": ""
    },
    "msgid": "<20230319113013.25046-1-tomi.valkeinen@ideasonboard.com>",
    "date": "2023-03-19T11:30:13",
    "name": "[libcamera-devel] py: cam: Network renderer",
    "commit_ref": null,
    "pull_url": null,
    "state": "new",
    "archived": false,
    "hash": "093426a2c4c966cb5650baaeef986c549b55c975",
    "submitter": {
        "id": 109,
        "url": "https://patchwork.libcamera.org/api/people/109/?format=api",
        "name": "Tomi Valkeinen",
        "email": "tomi.valkeinen@ideasonboard.com"
    },
    "delegate": null,
    "mbox": "https://patchwork.libcamera.org/patch/18421/mbox/",
    "series": [
        {
            "id": 3808,
            "url": "https://patchwork.libcamera.org/api/series/3808/?format=api",
            "web_url": "https://patchwork.libcamera.org/project/libcamera/list/?series=3808",
            "date": "2023-03-19T11:30:13",
            "name": "[libcamera-devel] py: cam: Network renderer",
            "version": 1,
            "mbox": "https://patchwork.libcamera.org/series/3808/mbox/"
        }
    ],
    "comments": "https://patchwork.libcamera.org/api/patches/18421/comments/",
    "check": "pending",
    "checks": "https://patchwork.libcamera.org/api/patches/18421/checks/",
    "tags": {},
    "headers": {
        "Return-Path": "<libcamera-devel-bounces@lists.libcamera.org>",
        "X-Original-To": "parsemail@patchwork.libcamera.org",
        "Delivered-To": "parsemail@patchwork.libcamera.org",
        "Received": [
            "from lancelot.ideasonboard.com (lancelot.ideasonboard.com\n\t[92.243.16.209])\n\tby patchwork.libcamera.org (Postfix) with ESMTPS id C648CC0F1B\n\tfor <parsemail@patchwork.libcamera.org>;\n\tSun, 19 Mar 2023 11:30:32 +0000 (UTC)",
            "from lancelot.ideasonboard.com (localhost [IPv6:::1])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTP id 0E0E6626A2;\n\tSun, 19 Mar 2023 12:30:32 +0100 (CET)",
            "from perceval.ideasonboard.com (perceval.ideasonboard.com\n\t[213.167.242.64])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id 353DB603AC\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tSun, 19 Mar 2023 12:30:30 +0100 (CET)",
            "from desky.lan (91-154-32-225.elisa-laajakaista.fi [91.154.32.225])\n\tby perceval.ideasonboard.com (Postfix) with ESMTPSA id 8D4861858;\n\tSun, 19 Mar 2023 12:30:29 +0100 (CET)"
        ],
        "DKIM-Signature": [
            "v=1; a=rsa-sha256; c=relaxed/simple; d=libcamera.org;\n\ts=mail; t=1679225432;\n\tbh=qkPNjrrpqm8InO+jYbEiFWUEtkW9qJnBN9/yRJ33YtE=;\n\th=To:Date:Subject:List-Id:List-Unsubscribe:List-Archive:List-Post:\n\tList-Help:List-Subscribe:From:Reply-To:From;\n\tb=zkVAspKpYGNEKoIuuUMrbzREg5Vam+mvjY5u1aBc1KwX1+yifRJ240GVRq+kYfe6k\n\tRvsrmwgQBBGZfhemmry3DGKUpnQNKK4WNFMB3giPEZWZQq2P7PP3ciu6Fqi3QMLARu\n\tmdAbQyvGb8/+GKznqyOVzgdQFIICc/JmsFrrERXs93BX5B8eiDKicyVB9T0yC8qJFl\n\twN939TeS2rre75ORSE8pSU6amUJqmufLd/nK+Pl1YtMnzyIWPi7rTMZEVM/yd2k9ns\n\tUEZhLkIvb+pfxq15Hf+PILvQpmlWJXumf8U+0tjY7UQC6et1EKVTHLkZIGMagqW43j\n\tI7N2BkErrGSMA==",
            "v=1; a=rsa-sha256; c=relaxed/simple; d=ideasonboard.com;\n\ts=mail; t=1679225429;\n\tbh=qkPNjrrpqm8InO+jYbEiFWUEtkW9qJnBN9/yRJ33YtE=;\n\th=From:To:Cc:Subject:Date:From;\n\tb=hBCgueOKuFczlk0ADu1YX0FRqgAbQCBeTknK+iDVAxLV80hCVI+Cb7oDXre2syo2x\n\tyHol1adVq/9YteNrcNjrXeGcP3Fo6/ekrdjfA5zD18kJ7usQ46Ip59JIPLCwUhg4Qr\n\tJz1n6aU4mxjeFNfZk8Wg8WxaiwGD/A+pGirCvWwU="
        ],
        "Authentication-Results": "lancelot.ideasonboard.com; dkim=pass (1024-bit key; \n\tunprotected) header.d=ideasonboard.com\n\theader.i=@ideasonboard.com\n\theader.b=\"hBCgueOK\"; dkim-atps=neutral",
        "To": "libcamera-devel@lists.libcamera.org",
        "Date": "Sun, 19 Mar 2023 13:30:13 +0200",
        "Message-Id": "<20230319113013.25046-1-tomi.valkeinen@ideasonboard.com>",
        "X-Mailer": "git-send-email 2.34.1",
        "MIME-Version": "1.0",
        "Content-Transfer-Encoding": "8bit",
        "Subject": "[libcamera-devel] [PATCH] py: cam: Network renderer",
        "X-BeenThere": "libcamera-devel@lists.libcamera.org",
        "X-Mailman-Version": "2.1.29",
        "Precedence": "list",
        "List-Id": "<libcamera-devel.lists.libcamera.org>",
        "List-Unsubscribe": "<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>",
        "List-Archive": "<https://lists.libcamera.org/pipermail/libcamera-devel/>",
        "List-Post": "<mailto:libcamera-devel@lists.libcamera.org>",
        "List-Help": "<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>",
        "List-Subscribe": "<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>",
        "From": "Tomi Valkeinen via libcamera-devel\n\t<libcamera-devel@lists.libcamera.org>",
        "Reply-To": "Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>",
        "Errors-To": "libcamera-devel-bounces@lists.libcamera.org",
        "Sender": "\"libcamera-devel\" <libcamera-devel-bounces@lists.libcamera.org>"
    },
    "content": "Here's something I have found useful a few times.\n\nThis adds a \"tx\" renderer to cam.py, which sends the frames over the\nnetwork to a receiver.\n\nIt also adds a \"cam-rx\" tool (non-libcamera based) which receives the\nframes and uses PyQt to show them on the screen, usually ran on a PC.\n\nThis is obviously not super efficient, but on the PC side it doesn't\nmatter. On the TX side, at least RPi4 seemed to work without noticeable\nlag, but my old 32-bit TI DRA76, when sending three camera streams, the\nperformance dropped to ~5fps. Still, I find that more than enough for\nmost development work.\n\nThis could be extended to also transmit the metadata.\n\nSigned-off-by: Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>\n---\n src/py/cam/cam.py                 |   4 +\n src/py/cam/cam_tx.py              |  94 +++++++++++++\n src/py/examples/cam-rx.py         | 155 +++++++++++++++++++++\n src/py/examples/cam_rx_helpers.py | 223 ++++++++++++++++++++++++++++++\n 4 files changed, 476 insertions(+)\n create mode 100644 src/py/cam/cam_tx.py\n create mode 100755 src/py/examples/cam-rx.py\n create mode 100644 src/py/examples/cam_rx_helpers.py",
    "diff": "diff --git a/src/py/cam/cam.py b/src/py/cam/cam.py\nindex 967a72f5..50f0f8d6 100755\n--- a/src/py/cam/cam.py\n+++ b/src/py/cam/cam.py\n@@ -387,6 +387,7 @@ def main():\n     parser.add_argument('--list-controls', action='store_true', help='List cameras controls')\n     parser.add_argument('-I', '--info', action='store_true', help='Display information about stream(s)')\n     parser.add_argument('-R', '--renderer', default='null', help='Renderer (null, kms, qt, qtgl)')\n+    parser.add_argument('--rargs', default='', help='Arguments passed to the renderer (pass --help to see help)')\n \n     # per camera options\n     parser.add_argument('-C', '--capture', nargs='?', type=int, const=1000000, action=CustomAction, help='Capture until interrupted by user or until CAPTURE frames captured')\n@@ -449,6 +450,9 @@ def main():\n         elif args.renderer == 'qtgl':\n             import cam_qtgl\n             renderer = cam_qtgl.QtRenderer(state)\n+        elif args.renderer == 'tx':\n+            import cam_tx\n+            renderer = cam_tx.TxRenderer(state, args.rargs)\n         else:\n             print('Bad renderer', args.renderer)\n             return -1\ndiff --git a/src/py/cam/cam_tx.py b/src/py/cam/cam_tx.py\nnew file mode 100644\nindex 00000000..3d31c6ef\n--- /dev/null\n+++ b/src/py/cam/cam_tx.py\n@@ -0,0 +1,94 @@\n+# SPDX-License-Identifier: GPL-2.0-or-later\n+# Copyright (C) 2023, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>\n+\n+import argparse\n+import libcamera\n+import libcamera.utils\n+import selectors\n+import socket\n+import struct\n+import sys\n+\n+PORT = 43242\n+\n+# ctx-idx, width, height, format, num-planes, plane1, plane2, plane3, plane4\n+struct_fmt = struct.Struct('<III12pI4I')\n+\n+\n+class TxRenderer:\n+    def __init__(self, state, ropts):\n+        parser = argparse.ArgumentParser(prog='TxRenderer')\n+        parser.add_argument('host', default='localhost', help='Address')\n+        args = parser.parse_args(ropts.split(' '))\n+\n+        self.host = args.host\n+\n+        self.state = state\n+\n+        self.cm = state.cm\n+        self.contexts = state.contexts\n+\n+        self.running = False\n+\n+    def setup(self):\n+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n+        sock.connect((self.host, PORT))\n+        self.sock = sock\n+\n+        buf_mmap_map = {}\n+\n+        for ctx in self.contexts:\n+            for stream in ctx.streams:\n+                for buf in ctx.allocator.buffers(stream):\n+                    mfb = libcamera.utils.MappedFrameBuffer(buf).mmap()\n+                    buf_mmap_map[buf] = mfb\n+\n+        self.buf_mmap_map = buf_mmap_map\n+\n+    def run(self):\n+        print('Capturing...')\n+\n+        self.running = True\n+\n+        sel = selectors.DefaultSelector()\n+        sel.register(self.cm.event_fd, selectors.EVENT_READ, self.readcam)\n+        sel.register(sys.stdin, selectors.EVENT_READ, self.readkey)\n+\n+        print('Press enter to exit')\n+\n+        while self.running:\n+            events = sel.select()\n+            for key, _ in events:\n+                callback = key.data\n+                callback(key.fileobj)\n+\n+        print('Exiting...')\n+\n+    def readcam(self, fd):\n+        self.running = self.state.event_handler()\n+\n+    def readkey(self, fileobj):\n+        sys.stdin.readline()\n+        self.running = False\n+\n+    def request_handler(self, ctx, req):\n+        buffers = req.buffers\n+\n+        for stream, fb in buffers.items():\n+            mfb = self.buf_mmap_map[fb]\n+\n+            plane_sizes = [len(p) for p in mfb.planes] + [0] * (4 - len(mfb.planes))\n+\n+            stream_config = stream.configuration\n+\n+            hdr = struct_fmt.pack(ctx.idx,\n+                                  stream_config.size.width, stream_config.size.height,\n+                                  bytes(str(stream_config.pixel_format), 'ascii'),\n+                                  len(mfb.planes), *plane_sizes)\n+\n+            self.sock.sendall(hdr)\n+\n+            for p in mfb.planes:\n+                self.sock.sendall(p)\n+\n+        self.state.request_processed(ctx, req)\ndiff --git a/src/py/examples/cam-rx.py b/src/py/examples/cam-rx.py\nnew file mode 100755\nindex 00000000..a53d59c8\n--- /dev/null\n+++ b/src/py/examples/cam-rx.py\n@@ -0,0 +1,155 @@\n+#!/usr/bin/env python3\n+\n+# SPDX-License-Identifier: BSD-3-Clause\n+# Copyright (C) 2023, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>\n+\n+from cam_rx_helpers import data_to_pix\n+from PyQt5 import QtCore, QtWidgets\n+from PyQt5.QtCore import Qt\n+import PyQt5.QtNetwork\n+import struct\n+import sys\n+import traceback\n+\n+PORT = 43242\n+receivers = []\n+\n+struct_fmt = struct.Struct('<III12pI4I')\n+\n+\n+# Loading MJPEG to a QPixmap produces corrupt JPEG data warnings. Ignore these.\n+def qt_message_handler(msg_type, msg_log_context, msg_string):\n+    if msg_string.startswith(\"Corrupt JPEG data\"):\n+        return\n+\n+    # For some reason qInstallMessageHandler returns None, so we won't\n+    # call the old handler\n+    if old_msg_handler is not None:\n+        old_msg_handler(msg_type, msg_log_context, msg_string)\n+    else:\n+        print(msg_string)\n+\n+\n+old_msg_handler = QtCore.qInstallMessageHandler(qt_message_handler)\n+\n+\n+class Receiver(QtWidgets.QWidget):\n+    def __init__(self, socket: PyQt5.QtNetwork.QTcpSocket):\n+        super().__init__()\n+\n+        self.name = '{}:{}'.format(socket.peerAddress().toString(), socket.peerPort())\n+\n+        print('[{}] Accepted new connection'.format(self.name))\n+\n+        self.socket = socket\n+\n+        self.socket.readyRead.connect(self.on_ready_read)\n+        self.socket.disconnected.connect(self.on_disconnected)\n+        self.socket.error.connect(self.on_error)\n+\n+        self.header_buffer = bytearray()\n+        self.data_buffer = bytearray()\n+        self.data_size = 0\n+\n+        self.state = 0\n+\n+        self.resize(1000, 600)\n+        self.setAttribute(Qt.WA_ShowWithoutActivating)\n+        self.setWindowFlag(Qt.WindowStaysOnTopHint, True)\n+\n+        self.gridLayout = QtWidgets.QGridLayout()\n+        self.setLayout(self.gridLayout)\n+\n+        self.labels = {}\n+\n+        self.show()\n+        print(\"done\")\n+\n+    def on_ready_read(self):\n+        while self.socket.bytesAvailable():\n+            if self.state == 0:\n+                data = self.socket.read(struct_fmt.size - len(self.header_buffer))\n+                self.header_buffer.extend(data)\n+\n+                if len(self.header_buffer) == struct_fmt.size:\n+                    self.on_header()\n+            else:\n+                data = self.socket.read(self.data_size - len(self.data_buffer))\n+                self.data_buffer.extend(data)\n+\n+                if len(self.data_buffer) == self.data_size:\n+                    try:\n+                        self.on_buffers()\n+                    except Exception:\n+                        print(traceback.format_exc())\n+                        qApp.exit(-1)\n+                        return\n+\n+    def on_header(self):\n+        self.header_tuple = struct_fmt.unpack_from(self.header_buffer)\n+        idx, w, h, fmtstr, num_planes, p0, p1, p2, p3 = self.header_tuple\n+        self.data_size = p0 + p1 + p2 + p3\n+        self.header_buffer = bytearray()\n+\n+        self.state = 1\n+\n+    def on_buffers(self):\n+        idx, w, h, fmtstr, num_planes, p0, p1, p2, p3 = self.header_tuple\n+        fmt = fmtstr.decode('ascii')\n+\n+        print('[{}] cam{} {}x{}-{}'.format(self.name, idx, w, h, fmt))\n+\n+        if idx not in self.labels:\n+            label = QtWidgets.QLabel()\n+            label.setSizePolicy(QtWidgets.QSizePolicy.Ignored, QtWidgets.QSizePolicy.Ignored)\n+            self.labels[idx] = label\n+            self.gridLayout.addWidget(label, self.gridLayout.count() // 2, self.gridLayout.count() % 2)\n+\n+        label = self.labels[idx]\n+\n+        pix = data_to_pix(fmt, w, h, self.data_buffer)\n+\n+        pix = pix.scaled(label.width(), label.height(), Qt.AspectRatioMode.KeepAspectRatio,\n+                         Qt.TransformationMode.FastTransformation)\n+\n+        label.setPixmap(pix)\n+\n+        self.data_buffer = bytearray()\n+\n+        self.state = 0\n+\n+    def on_disconnected(self):\n+        print('[{}] Disconnected'.format(self.name))\n+        self.close()\n+        receivers.remove(self)\n+\n+    def on_error(self):\n+        print('[{}] Error: {}'.format(self.name, self.socket.errorString()))\n+\n+\n+def new_connection(tcpServer):\n+    clientConnection: PyQt5.QtNetwork.QTcpSocket = tcpServer.nextPendingConnection()\n+    w = Receiver(clientConnection)\n+    receivers.append(w)\n+\n+\n+def readkey():\n+    global qApp\n+    sys.stdin.readline()\n+    qApp.quit()\n+\n+\n+if __name__ == '__main__':\n+    global qApp\n+\n+    qApp = QtWidgets.QApplication(sys.argv)\n+    qApp.setQuitOnLastWindowClosed(False)\n+\n+    keynotif = QtCore.QSocketNotifier(sys.stdin.fileno(), QtCore.QSocketNotifier.Read)\n+    keynotif.activated.connect(readkey)\n+\n+    tcpServer = PyQt5.QtNetwork.QTcpServer(qApp)\n+    tcpServer.listen(PyQt5.QtNetwork.QHostAddress('0.0.0.0'), PORT)\n+    tcpServer.newConnection.connect(lambda: new_connection(tcpServer))\n+\n+    sys.exit(qApp.exec_())\ndiff --git a/src/py/examples/cam_rx_helpers.py b/src/py/examples/cam_rx_helpers.py\nnew file mode 100644\nindex 00000000..293eb63d\n--- /dev/null\n+++ b/src/py/examples/cam_rx_helpers.py\n@@ -0,0 +1,223 @@\n+# SPDX-License-Identifier: BSD-3-Clause\n+# Copyright (C) 2023, Tomi Valkeinen <tomi.valkeinen@ideasonboard.com>\n+#\n+# Debayering code based on PiCamera documentation\n+\n+from numpy.lib.stride_tricks import as_strided\n+from PyQt5 import QtGui\n+import numpy as np\n+\n+\n+def demosaic(data, r0, g0, g1, b0):\n+    # Separate the components from the Bayer data to RGB planes\n+\n+    rgb = np.zeros(data.shape + (3,), dtype=data.dtype)\n+    rgb[1::2, 0::2, 0] = data[r0[1]::2, r0[0]::2]  # Red\n+    rgb[0::2, 0::2, 1] = data[g0[1]::2, g0[0]::2]  # Green\n+    rgb[1::2, 1::2, 1] = data[g1[1]::2, g1[0]::2]  # Green\n+    rgb[0::2, 1::2, 2] = data[b0[1]::2, b0[0]::2]  # Blue\n+\n+    # Below we present a fairly naive de-mosaic method that simply\n+    # calculates the weighted average of a pixel based on the pixels\n+    # surrounding it. The weighting is provided by a byte representation of\n+    # the Bayer filter which we construct first:\n+\n+    bayer = np.zeros(rgb.shape, dtype=np.uint8)\n+    bayer[1::2, 0::2, 0] = 1  # Red\n+    bayer[0::2, 0::2, 1] = 1  # Green\n+    bayer[1::2, 1::2, 1] = 1  # Green\n+    bayer[0::2, 1::2, 2] = 1  # Blue\n+\n+    # Allocate an array to hold our output with the same shape as the input\n+    # data. After this we define the size of window that will be used to\n+    # calculate each weighted average (3x3). Then we pad out the rgb and\n+    # bayer arrays, adding blank pixels at their edges to compensate for the\n+    # size of the window when calculating averages for edge pixels.\n+\n+    output = np.empty(rgb.shape, dtype=rgb.dtype)\n+    window = (3, 3)\n+    borders = (window[0] - 1, window[1] - 1)\n+    border = (borders[0] // 2, borders[1] // 2)\n+\n+    rgb = np.pad(rgb, [\n+        (border[0], border[0]),\n+        (border[1], border[1]),\n+        (0, 0),\n+    ], 'constant')\n+    bayer = np.pad(bayer, [\n+        (border[0], border[0]),\n+        (border[1], border[1]),\n+        (0, 0),\n+    ], 'constant')\n+\n+    # For each plane in the RGB data, we use a nifty numpy trick\n+    # (as_strided) to construct a view over the plane of 3x3 matrices. We do\n+    # the same for the bayer array, then use Einstein summation on each\n+    # (np.sum is simpler, but copies the data so it's slower), and divide\n+    # the results to get our weighted average:\n+\n+    for plane in range(3):\n+        p = rgb[..., plane]\n+        b = bayer[..., plane]\n+\n+        pview = as_strided(p, shape=(\n+            p.shape[0] - borders[0],\n+            p.shape[1] - borders[1]) + window, strides=p.strides * 2)\n+        bview = as_strided(b, shape=(\n+            b.shape[0] - borders[0],\n+            b.shape[1] - borders[1]) + window, strides=b.strides * 2)\n+        psum = np.einsum('ijkl->ij', pview)\n+        bsum = np.einsum('ijkl->ij', bview)\n+        output[..., plane] = psum // bsum\n+\n+    return output\n+\n+\n+def convert_raw(data, w, h, fmt):\n+    bayer_pattern = fmt[1:5]\n+    bitspp = int(fmt[5:])\n+\n+    if bitspp == 8:\n+        data = data.reshape((h, w))\n+        data = data.astype(np.uint16)\n+    elif bitspp in [10, 12]:\n+        data = data.view(np.uint16)\n+        data = data.reshape((h, w))\n+    else:\n+        raise Exception('Bad bitspp:' + str(bitspp))\n+\n+    idx = bayer_pattern.find('R')\n+    assert(idx != -1)\n+    r0 = (idx % 2, idx // 2)\n+\n+    idx = bayer_pattern.find('G')\n+    assert(idx != -1)\n+    g0 = (idx % 2, idx // 2)\n+\n+    idx = bayer_pattern.find('G', idx + 1)\n+    assert(idx != -1)\n+    g1 = (idx % 2, idx // 2)\n+\n+    idx = bayer_pattern.find('B')\n+    assert(idx != -1)\n+    b0 = (idx % 2, idx // 2)\n+\n+    rgb = demosaic(data, r0, g0, g1, b0)\n+    rgb = (rgb >> (bitspp - 8)).astype(np.uint8)\n+\n+    return rgb\n+\n+\n+def convert_yuv444_to_rgb(yuv):\n+    m = np.array([\n+        [1.0, 1.0, 1.0],\n+        [-0.000007154783816076815, -0.3441331386566162, 1.7720025777816772],\n+        [1.4019975662231445, -0.7141380310058594, 0.00001542569043522235]\n+    ])\n+\n+    rgb = np.dot(yuv, m)\n+    rgb[:, :, 0] -= 179.45477266423404\n+    rgb[:, :, 1] += 135.45870971679688\n+    rgb[:, :, 2] -= 226.8183044444304\n+    rgb = rgb.astype(np.uint8)\n+\n+    return rgb\n+\n+\n+def convert_yuyv(data, w, h):\n+    # YUV422\n+    yuyv = data.reshape((h, w // 2 * 4))\n+\n+    # YUV444\n+    yuv = np.empty((h, w, 3), dtype=np.uint8)\n+    yuv[:, :, 0] = yuyv[:, 0::2]                    # Y\n+    yuv[:, :, 1] = yuyv[:, 1::4].repeat(2, axis=1)  # U\n+    yuv[:, :, 2] = yuyv[:, 3::4].repeat(2, axis=1)  # V\n+\n+    return convert_yuv444_to_rgb(yuv)\n+\n+\n+def convert_uyvy(data, w, h):\n+    # YUV422\n+    yuyv = data.reshape((h, w // 2 * 4))\n+\n+    # YUV444\n+    yuv = np.empty((h, w, 3), dtype=np.uint8)\n+    yuv[:, :, 0] = yuyv[:, 1::2]                    # Y\n+    yuv[:, :, 1] = yuyv[:, 0::4].repeat(2, axis=1)  # U\n+    yuv[:, :, 2] = yuyv[:, 2::4].repeat(2, axis=1)  # V\n+\n+    return convert_yuv444_to_rgb(yuv)\n+\n+\n+def convert_nv12(data, w, h):\n+    plane1 = data[:w * h]\n+    plane2 = data[w * h:]\n+\n+    y = plane1.reshape((h, w))\n+    uv = plane2.reshape((h // 2, w // 2, 2))\n+\n+    # YUV444\n+    yuv = np.empty((h, w, 3), dtype=np.uint8)\n+    yuv[:, :, 0] = y[:, :]                    # Y\n+    yuv[:, :, 1] = uv[:, :, 0].repeat(2, axis=0).repeat(2, axis=1)  # U\n+    yuv[:, :, 2] = uv[:, :, 1].repeat(2, axis=0).repeat(2, axis=1)  # V\n+\n+    return convert_yuv444_to_rgb(yuv)\n+\n+\n+def to_rgb(fmt, w, h, data):\n+    if fmt == 'YUYV':\n+        return convert_yuyv(data, w, h)\n+\n+    if fmt == 'UYVY':\n+        return convert_uyvy(data, w, h)\n+\n+    elif fmt == 'NV12':\n+        return convert_nv12(data, w, h)\n+\n+    elif fmt == 'RGB888':\n+        rgb = data.reshape((h, w, 3))\n+        rgb[:, :, [0, 1, 2]] = rgb[:, :, [2, 1, 0]]\n+\n+    elif fmt == 'BGR888':\n+        rgb = data.reshape((h, w, 3))\n+\n+    elif fmt in ['ARGB8888', 'XRGB8888']:\n+        rgb = data.reshape((h, w, 4))\n+        rgb = np.flip(rgb, axis=2)\n+        # drop alpha component\n+        rgb = np.delete(rgb, np.s_[0::4], axis=2)\n+\n+    elif fmt.startswith('S'):\n+        return convert_raw(data, w, h, fmt)\n+\n+    else:\n+        raise Exception('Unsupported format ' + fmt)\n+\n+    return rgb\n+\n+\n+def data_to_rgb(fmt, w, h, data):\n+    data = np.frombuffer(data, dtype=np.uint8)\n+    rgb = to_rgb(fmt, w, h, data)\n+    return rgb\n+\n+\n+def rgb_to_pix(rgb):\n+    w = rgb.shape[1]\n+    h = rgb.shape[0]\n+    qim = QtGui.QImage(rgb, w, h, QtGui.QImage.Format.Format_RGB888)\n+    pix = QtGui.QPixmap.fromImage(qim)\n+    return pix\n+\n+\n+def data_to_pix(fmt, w, h, data):\n+    if fmt == 'MJPEG':\n+        pix = QtGui.QPixmap(w, h)\n+        pix.loadFromData(data)\n+    else:\n+        rgb = data_to_rgb(fmt, w, h, data)\n+        pix = rgb_to_pix(rgb)\n+\n+    return pix\n",
    "prefixes": [
        "libcamera-devel"
    ]
}