new file mode 100644
@@ -0,0 +1,123 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * post_processor_libyuv.cpp - Post Processor using libyuv
+ */
+
+#include "post_processor_libyuv.h"
+
+#include <libyuv/scale.h>
+
+#include <libcamera/formats.h>
+#include <libcamera/geometry.h>
+#include <libcamera/internal/formats.h>
+#include <libcamera/internal/log.h>
+#include <libcamera/pixel_format.h>
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(LIBYUV)
+
+int PostProcessorLibyuv::configure(const StreamConfiguration &inCfg,
+ const StreamConfiguration &outCfg)
+{
+ if (inCfg.pixelFormat != outCfg.pixelFormat) {
+ LOG(LIBYUV, Error)
+ << "Pixel format conversion is not supported"
+ << " (from " << inCfg.toString()
+ << " to " << outCfg.toString() << ")";
+ return -EINVAL;
+ }
+
+ if (inCfg.size.width < outCfg.size.width ||
+ inCfg.size.height < outCfg.size.height) {
+ LOG(LIBYUV, Error) << "Up-scaling is not supported"
+ << " (from " << inCfg.toString()
+ << " to " << outCfg.toString() << ")";
+ return -EINVAL;
+ }
+
+ if (inCfg.pixelFormat != formats::NV12) {
+ LOG(LIBYUV, Error) << "Unsupported format " << inCfg.pixelFormat
+ << " (only NV12 is supported)";
+ return -EINVAL;
+ }
+
+ sourceSize_ = inCfg.size;
+ destinationSize_ = outCfg.size;
+
+ getNV12LengthAndStride(inCfg.size, sourceLength_, sourceStride_);
+ getNV12LengthAndStride(outCfg.size, destinationLength_,
+ destinationStride_);
+
+ return 0;
+}
+
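+/*
+ * Scale the NV12 source frame into the destination buffer. Returns 0 on
+ * success or a negative error code otherwise.
+ */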
+int PostProcessorLibyuv::process(const FrameBuffer &source,
+ libcamera::MappedBuffer *destination,
+ [[maybe_unused]] const CameraMetadata &requestMetadata,
+ [[maybe_unused]] CameraMetadata *metadata)
+{
+ if (!isValidNV12Buffers(source, *destination))
+ return -EINVAL;
+
+ const MappedFrameBuffer sourceMapped(&source, PROT_READ);
+ if (!sourceMapped.isValid()) {
+ LOG(LIBYUV, Error) << "Failed to mmap camera frame buffer";
+ return -EINVAL;
+ }
+
+ int ret = libyuv::NV12Scale(sourceMapped.maps()[0].data(),
+ sourceStride_[0],
+ sourceMapped.maps()[1].data(),
+ sourceStride_[1],
+ sourceSize_.width, sourceSize_.height,
+ destination->maps()[0].data(),
+ destinationStride_[0],
+ destination->maps()[1].data(),
+ destinationStride_[1],
+ destinationSize_.width,
+ destinationSize_.height,
+ libyuv::FilterMode::kFilterBilinear);
+ if (ret) {
+ LOG(LIBYUV, Error) << "Failed to scale NV12 buffer: " << ret;
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
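+/*
+ * Verify that both buffers carry the two NV12 planes and that each plane is
+ * large enough for the sizes negotiated in configure().
+ */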
+bool PostProcessorLibyuv::isValidNV12Buffers(
+ const FrameBuffer &source,
+ const libcamera::MappedBuffer &destination) const
+{
+ if (source.planes().size() != 2u) {
+ LOG(LIBYUV, Error) << "The number of source planes is not 2";
+ return false;
+ }
+ if (destination.maps().size() != 2u) {
+ LOG(LIBYUV, Error)
+ << "The number of destination planes is not 2";
+ return false;
+ }
+
+ if (source.planes()[0].length < sourceLength_[0] ||
+ source.planes()[1].length < sourceLength_[1]) {
+ LOG(LIBYUV, Error)
+ << "The source plane lengths are too small";
+ return false;
+ }
+ if (destination.maps()[0].size() < destinationLength_[0] ||
+ destination.maps()[1].size() < destinationLength_[1]) {
+ LOG(LIBYUV, Error)
+ << "The destination plane lengths are too small";
+ return false;
+ }
+
+ return true;
+}
+
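+/*
+ * Compute the per-plane length and line stride of an NV12 frame of the given
+ * size, assuming lines are not padded. For a 1920x1080 frame this yields a
+ * 1920 byte stride and 1920 * 1080 bytes for the Y plane, and a 1920 byte
+ * stride and 1920 * 540 bytes for the interleaved UV plane.
+ */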
+// static
+void PostProcessorLibyuv::getNV12LengthAndStride(Size size,
+ unsigned int length[2],
+ unsigned int stride[2])
+{
+ const PixelFormatInfo &nv12Info = PixelFormatInfo::info(formats::NV12);
+ for (unsigned int i = 0; i < 2; i++) {
+ const unsigned int vertSubSample =
+ nv12Info.planes[i].verticalSubSampling;
+ length[i] = nv12Info.stride(size.width, i, /*align=*/1) *
+ ((size.height + vertSubSample - 1) / vertSubSample);
+ stride[i] = nv12Info.stride(size.width, i, 1);
+ }
+}
new file mode 100644
@@ -0,0 +1,44 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * post_processor_libyuv.h - Post Processor using libyuv
+ */
+#ifndef __ANDROID_POST_PROCESSOR_LIBYUV_H__
+#define __ANDROID_POST_PROCESSOR_LIBYUV_H__
+
+#include <libcamera/geometry.h>
+
+#include "../post_processor.h"
+
+class CameraDevice;
+
+class PostProcessorLibyuv : public PostProcessor
+{
+public:
+ PostProcessorLibyuv() = default;
+
+ int configure(const libcamera::StreamConfiguration &inCfg,
+ const libcamera::StreamConfiguration &outCfg) override;
+ int process(const libcamera::FrameBuffer &source,
+ libcamera::MappedBuffer *destination,
+ const CameraMetadata & /*requestMetadata*/,
+ CameraMetadata *metadata) override;
+
+private:
+ bool isValidNV12Buffers(const libcamera::FrameBuffer &source,
+ const libcamera::MappedBuffer &destination) const;
+
+ static void getNV12LengthAndStride(libcamera::Size size,
+ unsigned int length[2],
+ unsigned int stride[2]);
+
+ libcamera::Size sourceSize_;
+ libcamera::Size destinationSize_;
+ unsigned int sourceLength_[2] = {};
+ unsigned int destinationLength_[2] = {};
+ unsigned int sourceStride_[2] = {};
+ unsigned int destinationStride_[2] = {};
+};
+
+#endif /* __ANDROID_POST_PROCESSOR_LIBYUV_H__ */
@@ -44,6 +44,7 @@ android_hal_sources = files([
'jpeg/exif.cpp',
'jpeg/post_processor_jpeg.cpp',
'jpeg/thumbnailer.cpp',
+ 'libyuv/post_processor_libyuv.cpp'
])
android_camera_metadata_sources = files([
This adds PostProcessorLibyuv. It supports NV12 buffer scaling.

Signed-off-by: Hirokazu Honda <hiroh@chromium.org>
---
 src/android/libyuv/post_processor_libyuv.cpp | 123 +++++++++++++++++++
 src/android/libyuv/post_processor_libyuv.h   |  44 +++++++
 src/android/meson.build                      |   1 +
 3 files changed, 168 insertions(+)
 create mode 100644 src/android/libyuv/post_processor_libyuv.cpp
 create mode 100644 src/android/libyuv/post_processor_libyuv.h

-- 
2.30.0.280.ga3ce27912f-goog
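For reviewers unfamiliar with the PostProcessor interface, the following is a
minimal sketch of how the scaler is expected to be driven: configure() is
called once with matching NV12 input and output stream configurations, and
process() is then called for each captured frame. The variable names (inCfg,
outCfg, srcBuffer, dstMapped, requestMetadata, resultMetadata) are
illustrative only and are not part of this patch; the buffers and metadata
objects are assumed to be provided by the caller in the HAL.

	libcamera::StreamConfiguration inCfg, outCfg;
	inCfg.pixelFormat = libcamera::formats::NV12;
	inCfg.size = libcamera::Size(1920, 1080);
	outCfg.pixelFormat = libcamera::formats::NV12;
	outCfg.size = libcamera::Size(1280, 720);	/* down-scaling only */

	PostProcessorLibyuv scaler;
	int ret = scaler.configure(inCfg, outCfg);
	if (ret < 0)
		return ret;	/* unsupported configuration */

	/* srcBuffer: libcamera::FrameBuffer, dstMapped: libcamera::MappedBuffer */
	ret = scaler.process(srcBuffer, &dstMapped, requestMetadata,
			     &resultMetadata);
	if (ret < 0)
		return ret;	/* scaling failed */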