[libcamera-devel,RFC,5/6] android: camera_device: Introduce PostProcessorType

Message ID: 20210204100541.657503-6-hiroh@chromium.org
State: New
Series: Support stream mapping in Android HAL adaptation layer

Commit Message

Hirokazu Honda Feb. 4, 2021, 10:05 a.m. UTC
This introduces PostProcessorType, which tracks the post processor
required by each CameraStream.

Signed-off-by: Hirokazu Honda <hiroh@chromium.org>
---
 src/android/camera_device.cpp | 80 +++++++++++++++++++++++++++++------
 1 file changed, 68 insertions(+), 12 deletions(-)
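
A minimal sketch, not part of the change, of how the new enum is meant to
flow through configureStreams(): each Camera3Stream records which post
processor it needs, and the configuration loop later instantiates the
matching PostProcessor. The free-standing makePostProcessor() helper and
the simplified class definitions below are purely illustrative; the patch
keeps the equivalent switch inline in CameraDevice::configureStreams().

/* Illustrative only: simplified stand-ins for the real classes. */
#include <cstdint>
#include <memory>

enum class PostProcessorType : uint8_t {
	None = 0,
	Jpeg = 1,
	Yuv  = 2,
};

class PostProcessor
{
public:
	virtual ~PostProcessor() = default;
};

class PostProcessorJpeg : public PostProcessor {};
class PostProcessorYuv : public PostProcessor {};

/*
 * Hypothetical helper: turn the type recorded per stream into a concrete
 * post processor instance. A null return means the stream is handled
 * directly, with no post processing.
 */
std::unique_ptr<PostProcessor> makePostProcessor(PostProcessorType type)
{
	switch (type) {
	case PostProcessorType::Jpeg:
		return std::make_unique<PostProcessorJpeg>();
	case PostProcessorType::Yuv:
		return std::make_unique<PostProcessorYuv>();
	case PostProcessorType::None:
	default:
		return nullptr;
	}
}

In the patch itself the same decision is made inline, driven by the
per-stream postProcessorType collected while building streamConfigs.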

--
2.30.0.365.g02bc693789-goog

Patch

diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
index 93dfbcdb..a1ef07b6 100644
--- a/src/android/camera_device.cpp
+++ b/src/android/camera_device.cpp
@@ -9,6 +9,7 @@ 
 #include "camera_ops.h"
 #include "post_processor.h"
 #include "jpeg/post_processor_jpeg.h"
+#include "yuv/post_processor_yuv.h"

 #include <fstream>
 #include <sys/mman.h>
@@ -131,6 +132,17 @@  const std::map<int, const Camera3Format> camera3FormatsMap = {
 	},
 };

+/* PostProcessorType represents a post processor class.
+ * None: No post processor
+ * Jpeg: Jpeg encoding post processor
+ * Yuv : Yuv post processor.
+ */
+enum class PostProcessorType : uint8_t {
+	None = 0,
+	Jpeg = 1,
+	Yuv  = 2,
+};
+
 /*
  * \struct Camera3StreamConfig
  * \brief Data to store StreamConfiguration associated with camera3_stream(s)
@@ -142,11 +154,16 @@  struct Camera3StreamConfig {
 	struct Camera3Stream {
 		Camera3Stream(camera3_stream_t *stream, CameraStream::Type type)
 			: stream(stream), type(type) {}
+		Camera3Stream(camera3_stream_t *stream, CameraStream::Type type,
+			      PostProcessorType postProcessorType)
+			: stream(stream), type(type),
+			  postProcessorType(postProcessorType) {}

 		std::string toString() const;

 		camera3_stream_t *stream = nullptr;
 		CameraStream::Type type = CameraStream::Type::Direct;
+		PostProcessorType postProcessorType = PostProcessorType::None;
 	};

 	std::string toString() const;
@@ -169,6 +186,15 @@  std::string Camera3StreamConfig::Camera3Stream::toString() const {
 		os << "Mapped"; break;
 	}

+	switch (postProcessorType) {
+	case PostProcessorType::None:
+		os << "None"; break;
+	case PostProcessorType::Jpeg:
+		os << "Jpeg"; break;
+	case PostProcessorType::Yuv:
+		os << "Yuv"; break;
+	}
+
 	return os.str();
 }

@@ -1554,6 +1580,7 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
 	/* Now handle the MJPEG streams, adding a new stream if required. */
 	if (jpegStream) {
 		CameraStream::Type type;
+		PostProcessorType postProcessorType = PostProcessorType::None;
 		int index = -1;

 		/* Search for a compatible stream in the non-JPEG ones. */
@@ -1573,6 +1600,7 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)

 			type = CameraStream::Type::Mapped;
 			index = i;
+			postProcessorType = PostProcessorType::Jpeg;
 			break;
 		}

@@ -1600,14 +1628,18 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
 			index = streamConfigs.size() - 1;
 		}

-		streamConfigs[index].streams.push_back({ jpegStream, type });
+		streamConfigs[index].streams.push_back(
+			{ jpegStream, type, postProcessorType });
 	}

 	sortCamera3StreamConfigs(streamConfigs, jpegStream);
+
+	std::vector<PostProcessorType> postProcessorTypes;
 	for (const auto &streamConfig : streamConfigs) {
 		config_->addConfiguration(streamConfig.config);

 		for (auto &stream : streamConfig.streams) {
+			postProcessorTypes.push_back(stream.postProcessorType);
 			streams_.emplace_back(this, stream.type, stream.stream,
 					      config_->size() - 1);
 			stream.stream->priv = static_cast<void *>(&streams_.back());
@@ -1647,21 +1679,46 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
 	 * StreamConfiguration and set the number of required buffers in
 	 * the Android camera3_stream_t.
 	 */
-	for (CameraStream &cameraStream : streams_) {
+	for (unsigned int i = 0; i < streams_.size(); ++i) {
+		CameraStream &cameraStream = streams_[i];
 		std::unique_ptr<PostProcessor> postProcessor;

-		if (cameraStream.type() == CameraStream::Type::Internal ||
-		    cameraStream.type() == CameraStream::Type::Mapped) {
+		switch (postProcessorTypes[i]) {
+		case PostProcessorType::None:
+			break;
+		case PostProcessorType::Jpeg:
 			postProcessor =
 				std::make_unique<PostProcessorJpeg>(this);
+			break;
+		case PostProcessorType::Yuv:
+			postProcessor = std::make_unique<PostProcessorYuv>();
+			break;
+		}
+
+		if (postProcessor) {
+			const auto &camera3Stream = cameraStream.camera3Stream();
+			StreamConfiguration output;
+			switch (camera3Stream.format) {
+			case HAL_PIXEL_FORMAT_BLOB:
+				output.pixelFormat = formats::MJPEG;
+				break;
+			case HAL_PIXEL_FORMAT_YCbCr_420_888:
+				output.pixelFormat = formats::NV12;
+				break;
+			default:
+				LOG(HAL, Error)
+					<< "Unexpected format: "
+					<< utils::hex(camera3Stream.format);
+				break;
+			}

-			auto output = cameraStream.configuration();
-			output.pixelFormat = formats::MJPEG;
-			postProcessor->configure(cameraStream.configuration(),
-						 output);
+			output.size.width = camera3Stream.width;
+			output.size.height = camera3Stream.height;
+			ret = postProcessor->configure(
+				cameraStream.configuration(), output);
 			if (ret) {
-				LOG(HAL, Error) << "Failed to configure "
-						<< "PostProcessorJpeg";
+				LOG(HAL, Error)
+					<< "Failed to configure post processor";
 				return ret;
 			}
 		}
@@ -1871,12 +1928,11 @@  void CameraDevice::requestComplete(Request *request)
 	uint64_t timestamp = buffers.begin()->second->metadata().timestamp;
 	resultMetadata = getResultMetadata(descriptor, timestamp);

-	/* Handle any JPEG compression. */
 	for (unsigned int i = 0; i < descriptor->numBuffers_; ++i) {
 		CameraStream *cameraStream =
 			static_cast<CameraStream *>(descriptor->buffers_[i].stream->priv);

-		if (cameraStream->camera3Stream().format != HAL_PIXEL_FORMAT_BLOB)
+		if (cameraStream->type() == CameraStream::Type::Direct)
 			continue;

 		FrameBuffer *buffer = request->findBuffer(cameraStream->stream());