[libcamera-devel,v3,13/13] android: camera_device: Support MJPEG stream construction

Message ID 20200804214711.177645-14-kieran.bingham@ideasonboard.com
State Superseded
Series
  • android: JPEG support

Commit Message

Kieran Bingham Aug. 4, 2020, 9:47 p.m. UTC
MJPEG streams must be created referencing a libcamera stream.
This stream may already be provided by the request configuration,
in which case the existing stream is utilised.

If no compatible stream is available to encode, a new stream is requested
from the libcamera configuration.

Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>

---

v3
 - Add max jpeg size todo
 - Fix metadata allocations
 - Use a class member to store the max jpeg buffer size
 - Remove the scoping level for jpeg blob header
 - Don't rely on the compressor pointer as a flag
 - Fix camera metadata size allocations
---
 src/android/camera_device.cpp | 226 ++++++++++++++++++++++++++++++++--
 src/android/camera_device.h   |  12 ++
 2 files changed, 229 insertions(+), 9 deletions(-)

Comments

Jacopo Mondi Aug. 5, 2020, 8:03 a.m. UTC | #1
Hi Kieran,

On Tue, Aug 04, 2020 at 10:47:11PM +0100, Kieran Bingham wrote:
> MJPEG streams must be created referencing a libcamera stream.
> This stream may already be provided by the request configuration,
> in which case the existing stream is utilised.
>
> If no compatible stream is available to encode, a new stream is requested
> from the libcamera configuration.
>
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
>
> ---
>
> v3
>  - Add max jpeg size todo
>  - Fix metadata allocations
>  - Use a class member to store the max jpeg buffer size
>  - Remove the scoping level for jpeg blob header
>  - Don't rely on the compressor pointer as a flag
>  - Fix camera metadata size allocations
> ---
>  src/android/camera_device.cpp | 226 ++++++++++++++++++++++++++++++++--
>  src/android/camera_device.h   |  12 ++
>  2 files changed, 229 insertions(+), 9 deletions(-)
>
> diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
> index c529246e115c..433243c3bc56 100644
> --- a/src/android/camera_device.cpp
> +++ b/src/android/camera_device.cpp
> @@ -8,6 +8,7 @@
>  #include "camera_device.h"
>  #include "camera_ops.h"
>
> +#include <sys/mman.h>
>  #include <tuple>
>  #include <vector>
>
> @@ -22,6 +23,8 @@
>  #include "camera_metadata.h"
>  #include "system/graphics.h"
>
> +#include "jpeg/compressor_jpeg.h"
> +
>  using namespace libcamera;
>
>  namespace {
> @@ -129,6 +132,54 @@ const std::map<int, const Camera3Format> camera3FormatsMap = {
>
>  LOG_DECLARE_CATEGORY(HAL);
>
> +class MappedCamera3Buffer : public MappedBuffer
> +{
> +public:
> +	MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
> +};
> +
> +MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
> +					 int flags)
> +{
> +	maps_.reserve(camera3buffer->numFds);
> +	error_ = 0;
> +
> +	for (int i = 0; i < camera3buffer->numFds; i++) {
> +		if (camera3buffer->data[i] == -1)
> +			continue;
> +
> +		off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
> +		if (length < 0) {
> +			error_ = errno;
> +			LOG(HAL, Error) << "Failed to query plane length";
> +			break;
> +		}
> +
> +		void *address = mmap(nullptr, length, flags, MAP_SHARED,
> +				     camera3buffer->data[i], 0);
> +		if (address == MAP_FAILED) {
> +			error_ = errno;
> +			LOG(HAL, Error) << "Failed to mmap plane";
> +			break;
> +		}
> +
> +		maps_.emplace_back(static_cast<uint8_t *>(address),
> +				   static_cast<size_t>(length));
> +	}
> +
> +	valid_ = error_ == 0;
> +}
> +
> +CameraStream::CameraStream(PixelFormat f, Size s)
> +	: index(-1), format(f), size(s), jpeg(nullptr)
> +{
> +}
> +
> +CameraStream::~CameraStream()
> +{
> +	delete jpeg;
> +};
> +
>  /*
>   * \struct Camera3RequestDescriptor
>   *
> @@ -167,6 +218,12 @@ CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camer
>  	  facing_(CAMERA_FACING_FRONT), orientation_(0)
>  {
>  	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
> +
> +	/*
> +	 * \todo Determine a more accurate value for this during
> +	 *  streamConfiguration.
> +	 */
> +	max_jpeg_buffer_size_ = 13 << 20; /* 13631488 from USB HAL */
>  }
>
>  CameraDevice::~CameraDevice()
> @@ -417,10 +474,10 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
>  {
>  	/*
>  	 * \todo Keep this in sync with the actual number of entries.
> -	 * Currently: 50 entries, 647 bytes of static metadata
> +	 * Currently: 51 entries, 667 bytes of static metadata
>  	 */
> -	uint32_t numEntries = 50;
> -	uint32_t byteSize = 651;
> +	uint32_t numEntries = 51;
> +	uint32_t byteSize = 667;
>
>  	/*
>  	 * Calculate space occupation in bytes for dynamically built metadata
> @@ -576,6 +633,12 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>  				  availableThumbnailSizes.data(),
>  				  availableThumbnailSizes.size());
>
> +	/*
> +	 * \todo Calculate the maximum JPEG buffer size by asking the compressor
> +	 * giving the maximum frame size required.
> +	 */
> +	staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &max_jpeg_buffer_size_, 1);
> +
>  	/* Sensor static metadata. */
>  	int32_t pixelArraySize[] = {
>  		2592, 1944,
> @@ -789,6 +852,7 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>  		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
>  		ANDROID_CONTROL_AVAILABLE_MODES,
>  		ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
> +		ANDROID_JPEG_MAX_SIZE,
>  		ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
>  		ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
>  		ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
> @@ -855,6 +919,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>  		ANDROID_SENSOR_EXPOSURE_TIME,
>  		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
>  		ANDROID_STATISTICS_SCENE_FLICKER,
> +		ANDROID_JPEG_SIZE,
> +		ANDROID_JPEG_QUALITY,
> +		ANDROID_JPEG_ORIENTATION,
>  	};
>  	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
>  				  availableResultKeys.data(),
> @@ -1016,8 +1083,10 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>  	 */
>  	unsigned int streamIndex = 0;
>
> +	/* First handle all non-MJPEG streams. */
>  	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
>  		camera3_stream_t *stream = stream_list->streams[i];
> +		Size size(stream->width, stream->height);
>
>  		PixelFormat format = toPixelFormat(stream->format);
>
> @@ -1031,16 +1100,71 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>  		if (!format.isValid())
>  			return -EINVAL;
>
> +		streams_.emplace_back(format, size);
> +		stream->priv = static_cast<void *>(&streams_[i]);
> +
> +		/* Defer handling of MJPEG streams until all others are known. */
> +		if (format == formats::MJPEG)
> +			continue;
> +
>  		StreamConfiguration streamConfiguration;
>
> -		streamConfiguration.size.width = stream->width;
> -		streamConfiguration.size.height = stream->height;
> +		streamConfiguration.size = size;
>  		streamConfiguration.pixelFormat = format;
>
>  		config_->addConfiguration(streamConfiguration);
>
>  		streams_[i].index = streamIndex++;
> -		stream->priv = static_cast<void *>(&streams_[i]);
> +	}
> +
> +	/* Now handle MJPEG streams, adding a new stream if required. */
> +	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
> +		camera3_stream_t *stream = stream_list->streams[i];
> +		bool match = false;
> +
> +		if (streams_[i].format != formats::MJPEG)
> +			continue;
> +
> +		/* Search for a compatible stream */
> +		for (unsigned int j = 0; j < config_->size(); j++) {
> +			StreamConfiguration &cfg = config_->at(j);
> +
> +			/*
> +			 * \todo The PixelFormat must also be compatible with
> +			 * the encoder.
> +			 */
> +			if (cfg.size == streams_[i].size) {
> +				LOG(HAL, Info) << "Stream " << i
> +					       << " using libcamera stream " << j;
> +
> +				match = true;
> +				streams_[i].index = j;
> +			}
> +		}
> +
> +		/*
> +		 * Without a compatible match for JPEG encoding we must
> +		 * introduce a new stream to satisfy the request requirements.
> +		 */
> +		if (!match) {
> +			StreamConfiguration streamConfiguration;
> +
> +			/*
> +			 * \todo: The pixelFormat should be a 'best-fit' choice
> +			 * and may require a validation cycle. This is not yet
> +			 * handled, and should be considered as part of any
> +			 * stream configuration reworks.
> +			 */
> +			streamConfiguration.size.width = stream->width;
> +			streamConfiguration.size.height = stream->height;
> +			streamConfiguration.pixelFormat = formats::NV12;
> +
> +			LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
> +				       << " for MJPEG support";
> +
> +			config_->addConfiguration(streamConfiguration);
> +			streams_[i].index = streamIndex++;
> +		}
>  	}
>
>  	switch (config_->validate()) {
> @@ -1067,6 +1191,18 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>
>  		/* Use the bufferCount confirmed by the validation process. */
>  		stream->max_buffers = cfg.bufferCount;
> +
> +		/*
> +		 * Construct a software compressor for MJPEG streams from the
> +		 * chosen libcamera source stream.
> +		 */
> +		if (cameraStream->format == formats::MJPEG) {
> +			cameraStream->jpeg = new CompressorJPEG();
> +			cameraStream->jpeg->configure(cfg);
> +		} else {
> +			/* Either construct this correctly, or use a better interface */
> +			cameraStream->jpeg = nullptr;
> +		}
>  	}
>
>  	/*
> @@ -1161,6 +1297,10 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
>  		descriptor->buffers[i].stream = camera3Buffers[i].stream;
>  		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
>
> +		/* Software streams are handled after hardware streams complete. */
> +		if (cameraStream->format == formats::MJPEG)
> +			continue;
> +
>  		/*
>  		 * Create a libcamera buffer using the dmabuf descriptors of
>  		 * the camera3Buffer for each stream. The FrameBuffer is
> @@ -1217,8 +1357,76 @@ void CameraDevice::requestComplete(Request *request)
>  	resultMetadata = getResultMetadata(descriptor->frameNumber,
>  					   buffer->metadata().timestamp);
>
> -	/* Prepare to call back the Android camera stack. */
> +	/* Handle any JPEG compression */
> +	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
> +		CameraStream *cameraStream =
> +			static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
> +
> +		if (cameraStream->format != formats::MJPEG)
> +			continue;
> +
> +		Compressor *compressor = cameraStream->jpeg;
> +		/* Only handle streams with compression enabled. */
> +		if (!compressor)
> +			continue;

Small nit: Can it happen that format == MJPEG and !compressor ? If it
happens, isn't it an error ?

Also the order of
                Compressor *compressor =
                /* A comment */
                if (!compressor)

is a bit weird


> +
> +		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index);
> +		Stream *stream = streamConfiguration->stream();
> +		FrameBuffer *buffer = request->findBuffer(stream);
> +		if (!buffer) {
> +			LOG(HAL, Error) << "Failed to find a source stream buffer";
> +			continue;
> +		}
> +
> +		MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer, PROT_READ | PROT_WRITE);
> +		if (!mapped.isValid()) {
> +			LOG(HAL, Error) << "Failed to mmap android blob buffer";
> +			continue;
> +		}
> +
> +		CompressedImage output;
> +		output.data = static_cast<unsigned char *>(mapped.maps()[0].data());
> +		output.length = mapped.maps()[0].size();
> +
> +		int ret = compressor->compress(buffer, &output);
> +		if (ret) {
> +			LOG(HAL, Error) << "Failed to compress stream image";
> +			status = CAMERA3_BUFFER_STATUS_ERROR;
> +		}
>
> +		/*
> +		 * Fill in the JPEG blob header.
> +		 *
> +		 * The mapped size of the buffer is being returned as
> +		 * substantially larger than the requested JPEG_MAX_SIZE
> +		 * (which is referenced from max_jpeg_buffer_size_). Utilise
> +		 * this static size to ensure the correct offset of the blob is
> +		 * determined.
> +		 *
> +		 * \todo Investigate if the buffer size mismatch is an issue or
> +		 * expected behaviour.
> +		 */
> +		uint8_t *resultPtr = mapped.maps()[0].data() +
> +				     max_jpeg_buffer_size_ -
> +				     sizeof(struct camera3_jpeg_blob);
> +		auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
> +		blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
> +		blob->jpeg_size = output.length;
> +
> +		/* Update the JPEG result Metadata. */
> +		resultMetadata->addEntry(ANDROID_JPEG_SIZE,
> +					 &output.length, 1);
> +
> +		const uint32_t jpeg_quality = 95;
> +		resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
> +					 &jpeg_quality, 1);
> +
> +		const uint32_t jpeg_orientation = 0;
> +		resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
> +					 &jpeg_orientation, 1);
> +	}
> +
> +	/* Prepare to call back the Android camera stack. */
>  	camera3_capture_result_t captureResult = {};
>  	captureResult.frame_number = descriptor->frameNumber;
>  	captureResult.num_output_buffers = descriptor->numBuffers;
> @@ -1298,10 +1506,10 @@ std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number
>  {
>  	/*
>  	 * \todo Keep this in sync with the actual number of entries.
> -	 * Currently: 12 entries, 36 bytes
> +	 * Currently: 18 entries, 62 bytes
>  	 */
>  	std::unique_ptr<CameraMetadata> resultMetadata =
> -		std::make_unique<CameraMetadata>(15, 50);
> +		std::make_unique<CameraMetadata>(18, 62);
>  	if (!resultMetadata->isValid()) {
>  		LOG(HAL, Error) << "Failed to allocate static metadata";
>  		return nullptr;
> diff --git a/src/android/camera_device.h b/src/android/camera_device.h
> index 00472c219388..cfec5abeffa1 100644
> --- a/src/android/camera_device.h
> +++ b/src/android/camera_device.h
> @@ -23,15 +23,25 @@
>  #include "libcamera/internal/log.h"
>  #include "libcamera/internal/message.h"
>
> +#include "jpeg/compressor_jpeg.h"
> +
>  class CameraMetadata;
>
>  struct CameraStream {
> +	CameraStream(libcamera::PixelFormat, libcamera::Size);
> +	~CameraStream();
> +
>  	/*
>  	 * The index of the libcamera StreamConfiguration as added during
>  	 * configureStreams(). A single libcamera Stream may be used to deliver
>  	 * one or more streams to the Android framework.
>  	 */
>  	unsigned int index;
> +
> +	libcamera::PixelFormat format;
> +	libcamera::Size size;
> +
> +	CompressorJPEG *jpeg;
>  };
>
>  class CameraDevice : protected libcamera::Loggable
> @@ -104,6 +114,8 @@ private:
>
>  	int facing_;
>  	int orientation_;
> +
> +	unsigned int max_jpeg_buffer_size_;

This looks good! Thanks for sticking to all the pesky discussions on
v2!

Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>

Thanks
  j

>  };
>
>  #endif /* __ANDROID_CAMERA_DEVICE_H__ */
> --
> 2.25.1
>
> _______________________________________________
> libcamera-devel mailing list
> libcamera-devel@lists.libcamera.org
> https://lists.libcamera.org/listinfo/libcamera-devel
Kieran Bingham Aug. 5, 2020, 1:59 p.m. UTC | #2
Hi Jacopo,

On 05/08/2020 09:03, Jacopo Mondi wrote:
> Hi Kieran,
> 
> On Tue, Aug 04, 2020 at 10:47:11PM +0100, Kieran Bingham wrote:
>> MJPEG streams must be created referencing a libcamera stream.
>> This stream may already be provided by the request configuration,
>> in which case the existing stream is utilised.
>>
>> If no compatible stream is available to encode, a new stream is requested
>> from the libcamera configuration.
>>
>> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
>>
>> ---
>>
>> v3
>>  - Add max jpeg size todo
>>  - Fix metadata allocations
>>  - Use a class member to store the max jpeg buffer size
>>  - Remove the scoping level for jpeg blob header
>>  - Don't rely on the compressor pointer as a flag
>>  - Fix camera metadata size allocations
>> ---
>>  src/android/camera_device.cpp | 226 ++++++++++++++++++++++++++++++++--
>>  src/android/camera_device.h   |  12 ++
>>  2 files changed, 229 insertions(+), 9 deletions(-)
>>
>> diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
>> index c529246e115c..433243c3bc56 100644
>> --- a/src/android/camera_device.cpp
>> +++ b/src/android/camera_device.cpp
>> @@ -8,6 +8,7 @@
>>  #include "camera_device.h"
>>  #include "camera_ops.h"
>>
>> +#include <sys/mman.h>
>>  #include <tuple>
>>  #include <vector>
>>
>> @@ -22,6 +23,8 @@
>>  #include "camera_metadata.h"
>>  #include "system/graphics.h"
>>
>> +#include "jpeg/compressor_jpeg.h"
>> +
>>  using namespace libcamera;
>>
>>  namespace {
>> @@ -129,6 +132,54 @@ const std::map<int, const Camera3Format> camera3FormatsMap = {
>>
>>  LOG_DECLARE_CATEGORY(HAL);
>>
>> +class MappedCamera3Buffer : public MappedBuffer
>> +{
>> +public:
>> +	MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
>> +};
>> +
>> +MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
>> +					 int flags)
>> +{
>> +	maps_.reserve(camera3buffer->numFds);
>> +	error_ = 0;
>> +
>> +	for (int i = 0; i < camera3buffer->numFds; i++) {
>> +		if (camera3buffer->data[i] == -1)
>> +			continue;
>> +
>> +		off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
>> +		if (length < 0) {
>> +			error_ = errno;
>> +			LOG(HAL, Error) << "Failed to query plane length";
>> +			break;
>> +		}
>> +
>> +		void *address = mmap(nullptr, length, flags, MAP_SHARED,
>> +				     camera3buffer->data[i], 0);
>> +		if (address == MAP_FAILED) {
>> +			error_ = errno;
>> +			LOG(HAL, Error) << "Failed to mmap plane";
>> +			break;
>> +		}
>> +
>> +		maps_.emplace_back(static_cast<uint8_t *>(address),
>> +				   static_cast<size_t>(length));
>> +	}
>> +
>> +	valid_ = error_ == 0;
>> +}
>> +
>> +CameraStream::CameraStream(PixelFormat f, Size s)
>> +	: index(-1), format(f), size(s), jpeg(nullptr)
>> +{
>> +}
>> +
>> +CameraStream::~CameraStream()
>> +{
>> +	delete jpeg;
>> +};
>> +
>>  /*
>>   * \struct Camera3RequestDescriptor
>>   *
>> @@ -167,6 +218,12 @@ CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camer
>>  	  facing_(CAMERA_FACING_FRONT), orientation_(0)
>>  {
>>  	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
>> +
>> +	/*
>> +	 * \todo Determine a more accurate value for this during
>> +	 *  streamConfiguration.
>> +	 */
>> +	max_jpeg_buffer_size_ = 13 << 20; /* 13631488 from USB HAL */
>>  }
>>
>>  CameraDevice::~CameraDevice()
>> @@ -417,10 +474,10 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
>>  {
>>  	/*
>>  	 * \todo Keep this in sync with the actual number of entries.
>> -	 * Currently: 50 entries, 647 bytes of static metadata
>> +	 * Currently: 51 entries, 667 bytes of static metadata
>>  	 */
>> -	uint32_t numEntries = 50;
>> -	uint32_t byteSize = 651;
>> +	uint32_t numEntries = 51;
>> +	uint32_t byteSize = 667;
>>
>>  	/*
>>  	 * Calculate space occupation in bytes for dynamically built metadata
>> @@ -576,6 +633,12 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>>  				  availableThumbnailSizes.data(),
>>  				  availableThumbnailSizes.size());
>>
>> +	/*
>> +	 * \todo Calculate the maximum JPEG buffer size by asking the compressor
>> +	 * giving the maximum frame size required.
>> +	 */
>> +	staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &max_jpeg_buffer_size_, 1);
>> +
>>  	/* Sensor static metadata. */
>>  	int32_t pixelArraySize[] = {
>>  		2592, 1944,
>> @@ -789,6 +852,7 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>>  		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
>>  		ANDROID_CONTROL_AVAILABLE_MODES,
>>  		ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
>> +		ANDROID_JPEG_MAX_SIZE,
>>  		ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
>>  		ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
>>  		ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
>> @@ -855,6 +919,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>>  		ANDROID_SENSOR_EXPOSURE_TIME,
>>  		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
>>  		ANDROID_STATISTICS_SCENE_FLICKER,
>> +		ANDROID_JPEG_SIZE,
>> +		ANDROID_JPEG_QUALITY,
>> +		ANDROID_JPEG_ORIENTATION,
>>  	};
>>  	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
>>  				  availableResultKeys.data(),
>> @@ -1016,8 +1083,10 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>>  	 */
>>  	unsigned int streamIndex = 0;
>>
>> +	/* First handle all non-MJPEG streams. */
>>  	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
>>  		camera3_stream_t *stream = stream_list->streams[i];
>> +		Size size(stream->width, stream->height);
>>
>>  		PixelFormat format = toPixelFormat(stream->format);
>>
>> @@ -1031,16 +1100,71 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>>  		if (!format.isValid())
>>  			return -EINVAL;
>>
>> +		streams_.emplace_back(format, size);
>> +		stream->priv = static_cast<void *>(&streams_[i]);
>> +
>> +		/* Defer handling of MJPEG streams until all others are known. */
>> +		if (format == formats::MJPEG)
>> +			continue;
>> +
>>  		StreamConfiguration streamConfiguration;
>>
>> -		streamConfiguration.size.width = stream->width;
>> -		streamConfiguration.size.height = stream->height;
>> +		streamConfiguration.size = size;
>>  		streamConfiguration.pixelFormat = format;
>>
>>  		config_->addConfiguration(streamConfiguration);
>>
>>  		streams_[i].index = streamIndex++;
>> -		stream->priv = static_cast<void *>(&streams_[i]);
>> +	}
>> +
>> +	/* Now handle MJPEG streams, adding a new stream if required. */
>> +	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
>> +		camera3_stream_t *stream = stream_list->streams[i];
>> +		bool match = false;
>> +
>> +		if (streams_[i].format != formats::MJPEG)
>> +			continue;
>> +
>> +		/* Search for a compatible stream */
>> +		for (unsigned int j = 0; j < config_->size(); j++) {
>> +			StreamConfiguration &cfg = config_->at(j);
>> +
>> +			/*
>> +			 * \todo The PixelFormat must also be compatible with
>> +			 * the encoder.
>> +			 */
>> +			if (cfg.size == streams_[i].size) {
>> +				LOG(HAL, Info) << "Stream " << i
>> +					       << " using libcamera stream " << j;
>> +
>> +				match = true;
>> +				streams_[i].index = j;
>> +			}
>> +		}
>> +
>> +		/*
>> +		 * Without a compatible match for JPEG encoding we must
>> +		 * introduce a new stream to satisfy the request requirements.
>> +		 */
>> +		if (!match) {
>> +			StreamConfiguration streamConfiguration;
>> +
>> +			/*
>> +			 * \todo: The pixelFormat should be a 'best-fit' choice
>> +			 * and may require a validation cycle. This is not yet
>> +			 * handled, and should be considered as part of any
>> +			 * stream configuration reworks.
>> +			 */
>> +			streamConfiguration.size.width = stream->width;
>> +			streamConfiguration.size.height = stream->height;
>> +			streamConfiguration.pixelFormat = formats::NV12;
>> +
>> +			LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
>> +				       << " for MJPEG support";
>> +
>> +			config_->addConfiguration(streamConfiguration);
>> +			streams_[i].index = streamIndex++;
>> +		}
>>  	}
>>
>>  	switch (config_->validate()) {
>> @@ -1067,6 +1191,18 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>>
>>  		/* Use the bufferCount confirmed by the validation process. */
>>  		stream->max_buffers = cfg.bufferCount;
>> +
>> +		/*
>> +		 * Construct a software compressor for MJPEG streams from the
>> +		 * chosen libcamera source stream.
>> +		 */
>> +		if (cameraStream->format == formats::MJPEG) {
>> +			cameraStream->jpeg = new CompressorJPEG();
>> +			cameraStream->jpeg->configure(cfg);
>> +		} else {
>> +			/* Either construct this correctly, or use a better interface */
>> +			cameraStream->jpeg = nullptr;
>> +		}
>>  	}
>>
>>  	/*
>> @@ -1161,6 +1297,10 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
>>  		descriptor->buffers[i].stream = camera3Buffers[i].stream;
>>  		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
>>
>> +		/* Software streams are handled after hardware streams complete. */
>> +		if (cameraStream->format == formats::MJPEG)
>> +			continue;
>> +
>>  		/*
>>  		 * Create a libcamera buffer using the dmabuf descriptors of
>>  		 * the camera3Buffer for each stream. The FrameBuffer is
>> @@ -1217,8 +1357,76 @@ void CameraDevice::requestComplete(Request *request)
>>  	resultMetadata = getResultMetadata(descriptor->frameNumber,
>>  					   buffer->metadata().timestamp);
>>
>> -	/* Prepare to call back the Android camera stack. */
>> +	/* Handle any JPEG compression */
>> +	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
>> +		CameraStream *cameraStream =
>> +			static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
>> +
>> +		if (cameraStream->format != formats::MJPEG)
>> +			continue;
>> +
>> +		Compressor *compressor = cameraStream->jpeg;
>> +		/* Only handle streams with compression enabled. */
>> +		if (!compressor)
>> +			continue;
> 
> Small nit: Can it happen that format == MJPEG and !compressor ? If it
> happens, isn't it an error ?
> 

It shouldn't happen, at least until someone starts doing work on different
compressors ... so let's put an error path in.
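
Something along these lines perhaps (just a sketch, the exact message is
up for discussion):

	Compressor *compressor = cameraStream->jpeg;
	if (!compressor) {
		/*
		 * An MJPEG stream without a compressor would be a bug in
		 * configureStreams(), so flag the buffer as errored and
		 * skip it rather than silently ignoring it.
		 */
		LOG(HAL, Error) << "No compressor for MJPEG stream";
		status = CAMERA3_BUFFER_STATUS_ERROR;
		continue;
	}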


> Also the order of
>                 Compressor *compressor =
>                 /* A comment */
>                 if (!compressor)
> 
> is a bit weird


I can remove it.




>> +
>> +		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index);
>> +		Stream *stream = streamConfiguration->stream();
>> +		FrameBuffer *buffer = request->findBuffer(stream);
>> +		if (!buffer) {
>> +			LOG(HAL, Error) << "Failed to find a source stream buffer";
>> +			continue;
>> +		}
>> +
>> +		MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer, PROT_READ | PROT_WRITE);
>> +		if (!mapped.isValid()) {
>> +			LOG(HAL, Error) << "Failed to mmap android blob buffer";
>> +			continue;
>> +		}
>> +
>> +		CompressedImage output;
>> +		output.data = static_cast<unsigned char *>(mapped.maps()[0].data());
>> +		output.length = mapped.maps()[0].size();
>> +
>> +		int ret = compressor->compress(buffer, &output);
>> +		if (ret) {
>> +			LOG(HAL, Error) << "Failed to compress stream image";
>> +			status = CAMERA3_BUFFER_STATUS_ERROR;
>> +		}
>>
>> +		/*
>> +		 * Fill in the JPEG blob header.
>> +		 *
>> +		 * The mapped size of the buffer is being returned as
>> +		 * substantially larger than the requested JPEG_MAX_SIZE
>> +		 * (which is referenced from max_jpeg_buffer_size_). Utilise
>> +		 * this static size to ensure the correct offset of the blob is
>> +		 * determined.
>> +		 *
>> +		 * \todo Investigate if the buffer size mismatch is an issue or
>> +		 * expected behaviour.
>> +		 */
>> +		uint8_t *resultPtr = mapped.maps()[0].data() +
>> +				     max_jpeg_buffer_size_ -
>> +				     sizeof(struct camera3_jpeg_blob);
>> +		auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
>> +		blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
>> +		blob->jpeg_size = output.length;
>> +
>> +		/* Update the JPEG result Metadata. */
>> +		resultMetadata->addEntry(ANDROID_JPEG_SIZE,
>> +					 &output.length, 1);
>> +
>> +		const uint32_t jpeg_quality = 95;
>> +		resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
>> +					 &jpeg_quality, 1);
>> +
>> +		const uint32_t jpeg_orientation = 0;
>> +		resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
>> +					 &jpeg_orientation, 1);
>> +	}
>> +
>> +	/* Prepare to call back the Android camera stack. */
>>  	camera3_capture_result_t captureResult = {};
>>  	captureResult.frame_number = descriptor->frameNumber;
>>  	captureResult.num_output_buffers = descriptor->numBuffers;
>> @@ -1298,10 +1506,10 @@ std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number
>>  {
>>  	/*
>>  	 * \todo Keep this in sync with the actual number of entries.
>> -	 * Currently: 12 entries, 36 bytes
>> +	 * Currently: 18 entries, 62 bytes
>>  	 */
>>  	std::unique_ptr<CameraMetadata> resultMetadata =
>> -		std::make_unique<CameraMetadata>(15, 50);
>> +		std::make_unique<CameraMetadata>(18, 62);
>>  	if (!resultMetadata->isValid()) {
>>  		LOG(HAL, Error) << "Failed to allocate static metadata";
>>  		return nullptr;
>> diff --git a/src/android/camera_device.h b/src/android/camera_device.h
>> index 00472c219388..cfec5abeffa1 100644
>> --- a/src/android/camera_device.h
>> +++ b/src/android/camera_device.h
>> @@ -23,15 +23,25 @@
>>  #include "libcamera/internal/log.h"
>>  #include "libcamera/internal/message.h"
>>
>> +#include "jpeg/compressor_jpeg.h"
>> +
>>  class CameraMetadata;
>>
>>  struct CameraStream {
>> +	CameraStream(libcamera::PixelFormat, libcamera::Size);
>> +	~CameraStream();
>> +
>>  	/*
>>  	 * The index of the libcamera StreamConfiguration as added during
>>  	 * configureStreams(). A single libcamera Stream may be used to deliver
>>  	 * one or more streams to the Android framework.
>>  	 */
>>  	unsigned int index;
>> +
>> +	libcamera::PixelFormat format;
>> +	libcamera::Size size;
>> +
>> +	CompressorJPEG *jpeg;
>>  };
>>
>>  class CameraDevice : protected libcamera::Loggable
>> @@ -104,6 +114,8 @@ private:
>>
>>  	int facing_;
>>  	int orientation_;
>> +
>> +	unsigned int max_jpeg_buffer_size_;
> 
> This looks good! Thanks for sticking to all the pesky discussions on
> v2!
> 
> Reviewed-by: Jacopo Mondi <jacopo@jmondi.org>

Thanks!

> 
> Thanks
>   j
> 
>>  };
>>
>>  #endif /* __ANDROID_CAMERA_DEVICE_H__ */
>> --
>> 2.25.1
>>
>> _______________________________________________
>> libcamera-devel mailing list
>> libcamera-devel@lists.libcamera.org
>> https://lists.libcamera.org/listinfo/libcamera-devel
Laurent Pinchart Aug. 5, 2020, 2:25 p.m. UTC | #3
Hi Kieran,

Thank you for the patch.

On Tue, Aug 04, 2020 at 10:47:11PM +0100, Kieran Bingham wrote:
> MJPEG streams must be created referencing a libcamera stream.
> This stream may already be provided by the request configuration,
> in which case the existing stream is utilised.
> 
> If no compatible stream is available to encode, a new stream is requested
> from the libcamera configuration.
> 
> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
> 
> ---
> 
> v3
>  - Add max jpeg size todo
>  - Fix metadata allocations
>  - Use a class member to store the max jpeg buffer size
>  - Remove the scoping level for jpeg blob header
>  - Don't rely on the compressor pointer as a flag
>  - Fix camera metadata size allocations
> ---
>  src/android/camera_device.cpp | 226 ++++++++++++++++++++++++++++++++--
>  src/android/camera_device.h   |  12 ++
>  2 files changed, 229 insertions(+), 9 deletions(-)
> 
> diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
> index c529246e115c..433243c3bc56 100644
> --- a/src/android/camera_device.cpp
> +++ b/src/android/camera_device.cpp
> @@ -8,6 +8,7 @@
>  #include "camera_device.h"
>  #include "camera_ops.h"
>  
> +#include <sys/mman.h>
>  #include <tuple>
>  #include <vector>
>  
> @@ -22,6 +23,8 @@
>  #include "camera_metadata.h"
>  #include "system/graphics.h"
>  
> +#include "jpeg/compressor_jpeg.h"
> +
>  using namespace libcamera;
>  
>  namespace {
> @@ -129,6 +132,54 @@ const std::map<int, const Camera3Format> camera3FormatsMap = {
>  
>  LOG_DECLARE_CATEGORY(HAL);
>  
> +class MappedCamera3Buffer : public MappedBuffer
> +{
> +public:
> +	MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
> +};
> +
> +MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
> +					 int flags)
> +{
> +	maps_.reserve(camera3buffer->numFds);
> +	error_ = 0;
> +
> +	for (int i = 0; i < camera3buffer->numFds; i++) {
> +		if (camera3buffer->data[i] == -1)
> +			continue;
> +
> +		off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
> +		if (length < 0) {
> +			error_ = errno;

Should this be -errno ? I think the documentation of the error_ field
states it's negative.

> +			LOG(HAL, Error) << "Failed to query plane length";
> +			break;
> +		}
> +
> +		void *address = mmap(nullptr, length, flags, MAP_SHARED,
> +				     camera3buffer->data[i], 0);
> +		if (address == MAP_FAILED) {
> +			error_ = errno;
> +			LOG(HAL, Error) << "Failed to mmap plane";
> +			break;
> +		}
> +
> +		maps_.emplace_back(static_cast<uint8_t *>(address),
> +				   static_cast<size_t>(length));
> +	}
> +
> +	valid_ = error_ == 0;
> +}
> +
> +CameraStream::CameraStream(PixelFormat f, Size s)
> +	: index(-1), format(f), size(s), jpeg(nullptr)
> +{
> +}
> +
> +CameraStream::~CameraStream()
> +{
> +	delete jpeg;
> +};
> +
>  /*
>   * \struct Camera3RequestDescriptor
>   *
> @@ -167,6 +218,12 @@ CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camer
>  	  facing_(CAMERA_FACING_FRONT), orientation_(0)
>  {
>  	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
> +
> +	/*
> +	 * \todo Determine a more accurate value for this during
> +	 *  streamConfiguration.
> +	 */
> +	max_jpeg_buffer_size_ = 13 << 20; /* 13631488 from USB HAL */

There's a function in turbojpeg to retrieve the maximum size,
tjBufSize(). It essentially returns width * height * 4 plus some margin
though, so be prepared for big buffers :-)

As the plan seems to be to use the libjpeg RAW API, not the libturbojpeg
API, we may want to reimplement this function. It should be fairly
simple.

Another item for your todo list, the maximum size should be queried from
the Compressor for a given PixelFormat and Size.
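
Something like the following would probably do as a starting point (a
rough sketch only, the function name and the margin are made up):

	/*
	 * Worst-case JPEG buffer size for a frame of the given dimensions,
	 * mirroring what tjBufSize() does: roughly 4 bytes per pixel plus
	 * some margin for the JPEG and EXIF headers. A PixelFormat
	 * argument could refine this later.
	 */
	size_t CompressorJPEG::maxBufferSize(const Size &size) const
	{
		return size.width * size.height * 4 + 16 * 1024;
	}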

>  }
>  
>  CameraDevice::~CameraDevice()
> @@ -417,10 +474,10 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
>  {
>  	/*
>  	 * \todo Keep this in sync with the actual number of entries.
> -	 * Currently: 50 entries, 647 bytes of static metadata
> +	 * Currently: 51 entries, 667 bytes of static metadata
>  	 */
> -	uint32_t numEntries = 50;
> -	uint32_t byteSize = 651;
> +	uint32_t numEntries = 51;
> +	uint32_t byteSize = 667;
>  
>  	/*
>  	 * Calculate space occupation in bytes for dynamically built metadata
> @@ -576,6 +633,12 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>  				  availableThumbnailSizes.data(),
>  				  availableThumbnailSizes.size());
>  
> +	/*
> +	 * \todo Calculate the maximum JPEG buffer size by asking the compressor
> +	 * giving the maximum frame size required.
> +	 */

Ah, there we go :-)

> +	staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &max_jpeg_buffer_size_, 1);
> +
>  	/* Sensor static metadata. */
>  	int32_t pixelArraySize[] = {
>  		2592, 1944,
> @@ -789,6 +852,7 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>  		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
>  		ANDROID_CONTROL_AVAILABLE_MODES,
>  		ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
> +		ANDROID_JPEG_MAX_SIZE,
>  		ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
>  		ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
>  		ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
> @@ -855,6 +919,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>  		ANDROID_SENSOR_EXPOSURE_TIME,
>  		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
>  		ANDROID_STATISTICS_SCENE_FLICKER,
> +		ANDROID_JPEG_SIZE,
> +		ANDROID_JPEG_QUALITY,
> +		ANDROID_JPEG_ORIENTATION,
>  	};
>  	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
>  				  availableResultKeys.data(),
> @@ -1016,8 +1083,10 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>  	 */
>  	unsigned int streamIndex = 0;
>  
> +	/* First handle all non-MJPEG streams. */
>  	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
>  		camera3_stream_t *stream = stream_list->streams[i];
> +		Size size(stream->width, stream->height);
>  
>  		PixelFormat format = toPixelFormat(stream->format);
>  
> @@ -1031,16 +1100,71 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>  		if (!format.isValid())
>  			return -EINVAL;
>  
> +		streams_.emplace_back(format, size);
> +		stream->priv = static_cast<void *>(&streams_[i]);
> +
> +		/* Defer handling of MJPEG streams until all others are known. */
> +		if (format == formats::MJPEG)
> +			continue;
> +
>  		StreamConfiguration streamConfiguration;
>  
> -		streamConfiguration.size.width = stream->width;
> -		streamConfiguration.size.height = stream->height;
> +		streamConfiguration.size = size;
>  		streamConfiguration.pixelFormat = format;
>  
>  		config_->addConfiguration(streamConfiguration);
>  
>  		streams_[i].index = streamIndex++;
> -		stream->priv = static_cast<void *>(&streams_[i]);
> +	}
> +
> +	/* Now handle MJPEG streams, adding a new stream if required. */
> +	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
> +		camera3_stream_t *stream = stream_list->streams[i];
> +		bool match = false;
> +
> +		if (streams_[i].format != formats::MJPEG)
> +			continue;
> +
> +		/* Search for a compatible stream */
> +		for (unsigned int j = 0; j < config_->size(); j++) {
> +			StreamConfiguration &cfg = config_->at(j);
> +
> +			/*
> +			 * \todo The PixelFormat must also be compatible with
> +			 * the encoder.

We will likely also need to implement software down-scaling (and
possibly even format conversion) support if the hardware can't provide
us with an additional stream that matches the size and pixel format
needed for JPEG compression. Can you capture this in a todo as well ? I
think the code will need to be refactored to handle those additional
streams in a more generic way, not just for JPEG.

> +			 */
> +			if (cfg.size == streams_[i].size) {
> +				LOG(HAL, Info) << "Stream " << i
> +					       << " using libcamera stream " << j;
> +
> +				match = true;
> +				streams_[i].index = j;
> +			}
> +		}
> +
> +		/*
> +		 * Without a compatible match for JPEG encoding we must
> +		 * introduce a new stream to satisfy the request requirements.
> +		 */
> +		if (!match) {
> +			StreamConfiguration streamConfiguration;
> +
> +			/*
> +			 * \todo: The pixelFormat should be a 'best-fit' choice
> +			 * and may require a validation cycle. This is not yet
> +			 * handled, and should be considered as part of any
> +			 * stream configuration reworks.
> +			 */
> +			streamConfiguration.size.width = stream->width;
> +			streamConfiguration.size.height = stream->height;
> +			streamConfiguration.pixelFormat = formats::NV12;
> +
> +			LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
> +				       << " for MJPEG support";
> +
> +			config_->addConfiguration(streamConfiguration);
> +			streams_[i].index = streamIndex++;
> +		}
>  	}
>  
>  	switch (config_->validate()) {
> @@ -1067,6 +1191,18 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>  
>  		/* Use the bufferCount confirmed by the validation process. */
>  		stream->max_buffers = cfg.bufferCount;
> +
> +		/*
> +		 * Construct a software compressor for MJPEG streams from the
> +		 * chosen libcamera source stream.
> +		 */
> +		if (cameraStream->format == formats::MJPEG) {
> +			cameraStream->jpeg = new CompressorJPEG();
> +			cameraStream->jpeg->configure(cfg);
> +		} else {
> +			/* Either construct this correctly, or use a better interface */

I'm not sure what you mean here.

> +			cameraStream->jpeg = nullptr;
> +		}
>  	}
>  
>  	/*
> @@ -1161,6 +1297,10 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
>  		descriptor->buffers[i].stream = camera3Buffers[i].stream;
>  		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
>  
> +		/* Software streams are handled after hardware streams complete. */
> +		if (cameraStream->format == formats::MJPEG)
> +			continue;
> +
>  		/*
>  		 * Create a libcamera buffer using the dmabuf descriptors of
>  		 * the camera3Buffer for each stream. The FrameBuffer is
> @@ -1217,8 +1357,76 @@ void CameraDevice::requestComplete(Request *request)
>  	resultMetadata = getResultMetadata(descriptor->frameNumber,
>  					   buffer->metadata().timestamp);
>  
> -	/* Prepare to call back the Android camera stack. */
> +	/* Handle any JPEG compression */

s/compression/compression./

> +	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
> +		CameraStream *cameraStream =
> +			static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
> +
> +		if (cameraStream->format != formats::MJPEG)
> +			continue;
> +
> +		Compressor *compressor = cameraStream->jpeg;
> +		/* Only handle streams with compression enabled. */
> +		if (!compressor)
> +			continue;
> +
> +		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index);
> +		Stream *stream = streamConfiguration->stream();
> +		FrameBuffer *buffer = request->findBuffer(stream);
> +		if (!buffer) {
> +			LOG(HAL, Error) << "Failed to find a source stream buffer";
> +			continue;
> +		}
> +

Can you add a todo to mention that the code below (buffer mapping and
compression) needs to be moved to a separate thread ?

> +		MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer, PROT_READ | PROT_WRITE);

Line wrap ?

> +		if (!mapped.isValid()) {
> +			LOG(HAL, Error) << "Failed to mmap android blob buffer";
> +			continue;
> +		}
> +
> +		CompressedImage output;
> +		output.data = static_cast<unsigned char *>(mapped.maps()[0].data());
> +		output.length = mapped.maps()[0].size();
> +
> +		int ret = compressor->compress(buffer, &output);
> +		if (ret) {
> +			LOG(HAL, Error) << "Failed to compress stream image";
> +			status = CAMERA3_BUFFER_STATUS_ERROR;

Do we need to proceed to adding the metadata entries then ? Or should we
continue to the next loop iteration ?

> +		}
>  
> +		/*
> +		 * Fill in the JPEG blob header.
> +		 *
> +		 * The mapped size of the buffer is being returned as
> +		 * substantially larger than the requested JPEG_MAX_SIZE
> +		 * (which is referenced from max_jpeg_buffer_size_). Utilise
> +		 * this static size to ensure the correct offset of the blob is
> +		 * determined.
> +		 *
> +		 * \todo Investigate if the buffer size mismatch is an issue or
> +		 * expected behaviour.
> +		 */
> +		uint8_t *resultPtr = mapped.maps()[0].data() +
> +				     max_jpeg_buffer_size_ -
> +				     sizeof(struct camera3_jpeg_blob);
> +		auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
> +		blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
> +		blob->jpeg_size = output.length;
> +
> +		/* Update the JPEG result Metadata. */
> +		resultMetadata->addEntry(ANDROID_JPEG_SIZE,
> +					 &output.length, 1);
> +
> +		const uint32_t jpeg_quality = 95;
> +		resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
> +					 &jpeg_quality, 1);
> +
> +		const uint32_t jpeg_orientation = 0;
> +		resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
> +					 &jpeg_orientation, 1);
> +	}
> +
> +	/* Prepare to call back the Android camera stack. */
>  	camera3_capture_result_t captureResult = {};
>  	captureResult.frame_number = descriptor->frameNumber;
>  	captureResult.num_output_buffers = descriptor->numBuffers;
> @@ -1298,10 +1506,10 @@ std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number
>  {
>  	/*
>  	 * \todo Keep this in sync with the actual number of entries.
> -	 * Currently: 12 entries, 36 bytes
> +	 * Currently: 18 entries, 62 bytes
>  	 */
>  	std::unique_ptr<CameraMetadata> resultMetadata =
> -		std::make_unique<CameraMetadata>(15, 50);
> +		std::make_unique<CameraMetadata>(18, 62);
>  	if (!resultMetadata->isValid()) {
>  		LOG(HAL, Error) << "Failed to allocate static metadata";
>  		return nullptr;
> diff --git a/src/android/camera_device.h b/src/android/camera_device.h
> index 00472c219388..cfec5abeffa1 100644
> --- a/src/android/camera_device.h
> +++ b/src/android/camera_device.h
> @@ -23,15 +23,25 @@
>  #include "libcamera/internal/log.h"
>  #include "libcamera/internal/message.h"
>  
> +#include "jpeg/compressor_jpeg.h"
> +
>  class CameraMetadata;
>  
>  struct CameraStream {

I'd turn this into a class when you get the chance.
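
Something along these lines, keeping the existing members (a sketch only;
setters, or moving the assignments into the class, would be needed where
configureStreams() currently writes the members directly):

	class CameraStream
	{
	public:
		CameraStream(libcamera::PixelFormat format, libcamera::Size size);
		~CameraStream();

		unsigned int index() const { return index_; }
		libcamera::PixelFormat format() const { return format_; }
		libcamera::Size size() const { return size_; }
		Compressor *compressor() const { return jpeg_; }

	private:
		unsigned int index_;
		libcamera::PixelFormat format_;
		libcamera::Size size_;
		Compressor *jpeg_;
	};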

> +	CameraStream(libcamera::PixelFormat, libcamera::Size);
> +	~CameraStream();
> +
>  	/*
>  	 * The index of the libcamera StreamConfiguration as added during
>  	 * configureStreams(). A single libcamera Stream may be used to deliver
>  	 * one or more streams to the Android framework.
>  	 */
>  	unsigned int index;
> +
> +	libcamera::PixelFormat format;
> +	libcamera::Size size;
> +
> +	CompressorJPEG *jpeg;

Should this be a pointer to Compressor, not CompressorJPEG ?

>  };
>  
>  class CameraDevice : protected libcamera::Loggable
> @@ -104,6 +114,8 @@ private:
>  
>  	int facing_;
>  	int orientation_;
> +
> +	unsigned int max_jpeg_buffer_size_;

maxJpegBufferSize_ ?

>  };
>  
>  #endif /* __ANDROID_CAMERA_DEVICE_H__ */
Kieran Bingham Aug. 5, 2020, 3:10 p.m. UTC | #4
Hi Laurent,

On 05/08/2020 15:25, Laurent Pinchart wrote:
> Hi Kieran,
> 
> Thank you for the patch.
> 
> On Tue, Aug 04, 2020 at 10:47:11PM +0100, Kieran Bingham wrote:
>> MJPEG streams must be created referencing a libcamera stream.
>> This stream may already be provided by the request configuration,
>> in which case the existing stream is utilised.
>>
>> If no compatible stream is available to encode, a new stream is requested
>> from the libcamera configuration.
>>
>> Signed-off-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
>>
>> ---
>>
>> v3
>>  - Add max jpeg size todo
>>  - Fix metadata allocations
>>  - Use a class member to store the max jpeg buffer size
>>  - Remove the scoping level for jpeg blob header
>>  - Don't rely on the compressor pointer as a flag
>>  - Fix camera metadata size allocations
>> ---
>>  src/android/camera_device.cpp | 226 ++++++++++++++++++++++++++++++++--
>>  src/android/camera_device.h   |  12 ++
>>  2 files changed, 229 insertions(+), 9 deletions(-)
>>
>> diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
>> index c529246e115c..433243c3bc56 100644
>> --- a/src/android/camera_device.cpp
>> +++ b/src/android/camera_device.cpp
>> @@ -8,6 +8,7 @@
>>  #include "camera_device.h"
>>  #include "camera_ops.h"
>>  
>> +#include <sys/mman.h>
>>  #include <tuple>
>>  #include <vector>
>>  
>> @@ -22,6 +23,8 @@
>>  #include "camera_metadata.h"
>>  #include "system/graphics.h"
>>  
>> +#include "jpeg/compressor_jpeg.h"
>> +
>>  using namespace libcamera;
>>  
>>  namespace {
>> @@ -129,6 +132,54 @@ const std::map<int, const Camera3Format> camera3FormatsMap = {
>>  
>>  LOG_DECLARE_CATEGORY(HAL);
>>  
>> +class MappedCamera3Buffer : public MappedBuffer
>> +{
>> +public:
>> +	MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
>> +};
>> +
>> +MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
>> +					 int flags)
>> +{
>> +	maps_.reserve(camera3buffer->numFds);
>> +	error_ = 0;
>> +
>> +	for (int i = 0; i < camera3buffer->numFds; i++) {
>> +		if (camera3buffer->data[i] == -1)
>> +			continue;
>> +
>> +		off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
>> +		if (length < 0) {
>> +			error_ = errno;
> 
> Should this be -errno ? I think the documentation of the error_ field
> states it's negative.

Yes.

> 
>> +			LOG(HAL, Error) << "Failed to query plane length";
>> +			break;
>> +		}
>> +
>> +		void *address = mmap(nullptr, length, flags, MAP_SHARED,
>> +				     camera3buffer->data[i], 0);
>> +		if (address == MAP_FAILED) {
>> +			error_ = errno;

Same here.

>> +			LOG(HAL, Error) << "Failed to mmap plane";
>> +			break;
>> +		}
>> +
>> +		maps_.emplace_back(static_cast<uint8_t *>(address),
>> +				   static_cast<size_t>(length));
>> +	}
>> +
>> +	valid_ = error_ == 0;
>> +}
>> +
>> +CameraStream::CameraStream(PixelFormat f, Size s)
>> +	: index(-1), format(f), size(s), jpeg(nullptr)
>> +{
>> +}
>> +
>> +CameraStream::~CameraStream()
>> +{
>> +	delete jpeg;
>> +};
>> +
>>  /*
>>   * \struct Camera3RequestDescriptor
>>   *
>> @@ -167,6 +218,12 @@ CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camer
>>  	  facing_(CAMERA_FACING_FRONT), orientation_(0)
>>  {
>>  	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
>> +
>> +	/*
>> +	 * \todo Determine a more accurate value for this during
>> +	 *  streamConfiguration.
>> +	 */
>> +	max_jpeg_buffer_size_ = 13 << 20; /* 13631488 from USB HAL */
> 
> There's a function in turbojpeg to retrieve the maximum size,
> tjBufSize(). It essentially returns width * height * 4 plus some margin
> though, so be prepared for big buffers :-)
> 
> As the plan seems to be to use the libjpeg RAW API, not the libturbojpeg
> API, we may want to reimplement this function. It should be fairly
> simple.
> 
> Another item for your todo list, the maximum size should be queried from
> the Compressor for a given PixelFormat and Size.

It's already in the todos.


> 
>>  }
>>  
>>  CameraDevice::~CameraDevice()
>> @@ -417,10 +474,10 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
>>  {
>>  	/*
>>  	 * \todo Keep this in sync with the actual number of entries.
>> -	 * Currently: 50 entries, 647 bytes of static metadata
>> +	 * Currently: 51 entries, 667 bytes of static metadata
>>  	 */
>> -	uint32_t numEntries = 50;
>> -	uint32_t byteSize = 651;
>> +	uint32_t numEntries = 51;
>> +	uint32_t byteSize = 667;
>>  
>>  	/*
>>  	 * Calculate space occupation in bytes for dynamically built metadata
>> @@ -576,6 +633,12 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>>  				  availableThumbnailSizes.data(),
>>  				  availableThumbnailSizes.size());
>>  
>> +	/*
>> +	 * \todo Calculate the maximum JPEG buffer size by asking the compressor
>> +	 * giving the maximum frame size required.
>> +	 */
> 
> Ah, there we go :-)

:-D

> 
>> +	staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &max_jpeg_buffer_size_, 1);
>> +
>>  	/* Sensor static metadata. */
>>  	int32_t pixelArraySize[] = {
>>  		2592, 1944,
>> @@ -789,6 +852,7 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>>  		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
>>  		ANDROID_CONTROL_AVAILABLE_MODES,
>>  		ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
>> +		ANDROID_JPEG_MAX_SIZE,
>>  		ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
>>  		ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
>>  		ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
>> @@ -855,6 +919,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>>  		ANDROID_SENSOR_EXPOSURE_TIME,
>>  		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
>>  		ANDROID_STATISTICS_SCENE_FLICKER,
>> +		ANDROID_JPEG_SIZE,
>> +		ANDROID_JPEG_QUALITY,
>> +		ANDROID_JPEG_ORIENTATION,
>>  	};
>>  	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
>>  				  availableResultKeys.data(),
>> @@ -1016,8 +1083,10 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>>  	 */
>>  	unsigned int streamIndex = 0;
>>  
>> +	/* First handle all non-MJPEG streams. */
>>  	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
>>  		camera3_stream_t *stream = stream_list->streams[i];
>> +		Size size(stream->width, stream->height);
>>  
>>  		PixelFormat format = toPixelFormat(stream->format);
>>  
>> @@ -1031,16 +1100,71 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>>  		if (!format.isValid())
>>  			return -EINVAL;
>>  
>> +		streams_.emplace_back(format, size);
>> +		stream->priv = static_cast<void *>(&streams_[i]);
>> +
>> +		/* Defer handling of MJPEG streams until all others are known. */
>> +		if (format == formats::MJPEG)
>> +			continue;
>> +
>>  		StreamConfiguration streamConfiguration;
>>  
>> -		streamConfiguration.size.width = stream->width;
>> -		streamConfiguration.size.height = stream->height;
>> +		streamConfiguration.size = size;
>>  		streamConfiguration.pixelFormat = format;
>>  
>>  		config_->addConfiguration(streamConfiguration);
>>  
>>  		streams_[i].index = streamIndex++;
>> -		stream->priv = static_cast<void *>(&streams_[i]);
>> +	}
>> +
>> +	/* Now handle MJPEG streams, adding a new stream if required. */
>> +	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
>> +		camera3_stream_t *stream = stream_list->streams[i];
>> +		bool match = false;
>> +
>> +		if (streams_[i].format != formats::MJPEG)
>> +			continue;
>> +
>> +		/* Search for a compatible stream */
>> +		for (unsigned int j = 0; j < config_->size(); j++) {
>> +			StreamConfiguration &cfg = config_->at(j);
>> +
>> +			/*
>> +			 * \todo The PixelFormat must also be compatible with
>> +			 * the encoder.
> 
> We will likely also need to implement software down-scaling (and
> possibly even format conversion) support if the hardware can't provide
> us with an additional stream that matches the size and pixel format
> needed for JPEG compression. Can you capture this in a todo as well ? I
> think the code will need to be refactored to handle those additional
> streams in a more generic way, not just for JPEG.


Yes, that's where I think we need to factor out some kind of CameraStream layer.



>> +			 */
>> +			if (cfg.size == streams_[i].size) {
>> +				LOG(HAL, Info) << "Stream " << i
>> +					       << " using libcamera stream " << j;
>> +
>> +				match = true;
>> +				streams_[i].index = j;
>> +			}
>> +		}
>> +
>> +		/*
>> +		 * Without a compatible match for JPEG encoding we must
>> +		 * introduce a new stream to satisfy the request requirements.
>> +		 */
>> +		if (!match) {
>> +			StreamConfiguration streamConfiguration;
>> +
>> +			/*
>> +			 * \todo: The pixelFormat should be a 'best-fit' choice
>> +			 * and may require a validation cycle. This is not yet
>> +			 * handled, and should be considered as part of any
>> +			 * stream configuration reworks.
>> +			 */
>> +			streamConfiguration.size.width = stream->width;
>> +			streamConfiguration.size.height = stream->height;
>> +			streamConfiguration.pixelFormat = formats::NV12;
>> +
>> +			LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
>> +				       << " for MJPEG support";
>> +
>> +			config_->addConfiguration(streamConfiguration);
>> +			streams_[i].index = streamIndex++;
>> +		}
>>  	}
>>  
>>  	switch (config_->validate()) {
>> @@ -1067,6 +1191,18 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
>>  
>>  		/* Use the bufferCount confirmed by the validation process. */
>>  		stream->max_buffers = cfg.bufferCount;
>> +
>> +		/*
>> +		 * Construct a software compressor for MJPEG streams from the
>> +		 * chosen libcamera source stream.
>> +		 */
>> +		if (cameraStream->format == formats::MJPEG) {
>> +			cameraStream->jpeg = new CompressorJPEG();
>> +			cameraStream->jpeg->configure(cfg);
>> +		} else {
>> +			/* Either construct this correctly, or use a better interface */
> 
> I'm not sure what you mean here.

Old comment, can drop this I think.

But essentially somewhere around here we will need a factory to construct
the right compressor anyway, depending on whether a hardware compressor
can be used or not. I expect this to all get refactored.
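
Something along these lines is what I have in mind (purely illustrative,
the name is invented):

	/* Pick the appropriate compressor for a configured stream. */
	static Compressor *createCompressor(const StreamConfiguration &cfg)
	{
		/*
		 * Today only the software JPEG compressor exists; a
		 * hardware-backed implementation could be selected here
		 * later, based on the platform and the configuration.
		 */
		CompressorJPEG *jpeg = new CompressorJPEG();
		jpeg->configure(cfg);
		return jpeg;
	}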


> 
>> +			cameraStream->jpeg = nullptr;
>> +		}
>>  	}
>>  
>>  	/*
>> @@ -1161,6 +1297,10 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
>>  		descriptor->buffers[i].stream = camera3Buffers[i].stream;
>>  		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
>>  
>> +		/* Software streams are handled after hardware streams complete. */
>> +		if (cameraStream->format == formats::MJPEG)
>> +			continue;
>> +
>>  		/*
>>  		 * Create a libcamera buffer using the dmabuf descriptors of
>>  		 * the camera3Buffer for each stream. The FrameBuffer is
>> @@ -1217,8 +1357,76 @@ void CameraDevice::requestComplete(Request *request)
>>  	resultMetadata = getResultMetadata(descriptor->frameNumber,
>>  					   buffer->metadata().timestamp);
>>  
>> -	/* Prepare to call back the Android camera stack. */
>> +	/* Handle any JPEG compression */
> 
> s/compression/compression./
> 
>> +	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
>> +		CameraStream *cameraStream =
>> +			static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
>> +
>> +		if (cameraStream->format != formats::MJPEG)
>> +			continue;
>> +
>> +		Compressor *compressor = cameraStream->jpeg;
>> +		/* Only handle streams with compression enabled. */
>> +		if (!compressor)
>> +			continue;
>> +
>> +		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index);
>> +		Stream *stream = streamConfiguration->stream();
>> +		FrameBuffer *buffer = request->findBuffer(stream);
>> +		if (!buffer) {
>> +			LOG(HAL, Error) << "Failed to find a source stream buffer";
>> +			continue;
>> +		}
>> +
> 
> Can you add a todo to mention that the code below (buffer mapping and
> compression) needs to be moved to a separate thread ?
> 

Done

>> +		MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer, PROT_READ | PROT_WRITE);
> 
> Line wrap ?
> 

done.

>> +		if (!mapped.isValid()) {
>> +			LOG(HAL, Error) << "Failed to mmap android blob buffer";
>> +			continue;
>> +		}
>> +
>> +		CompressedImage output;
>> +		output.data = static_cast<unsigned char *>(mapped.maps()[0].data());
>> +		output.length = mapped.maps()[0].size();
>> +
>> +		int ret = compressor->compress(buffer, &output);
>> +		if (ret) {
>> +			LOG(HAL, Error) << "Failed to compress stream image";
>> +			status = CAMERA3_BUFFER_STATUS_ERROR;
> 
> Do we need to proceed to adding the metadata entries then ? Or should we
> continue to the next loop iteration ?

Let's put in a continue.
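
So the error path in v4 becomes:

		int ret = compressor->compress(buffer, &output);
		if (ret) {
			LOG(HAL, Error) << "Failed to compress stream image";
			status = CAMERA3_BUFFER_STATUS_ERROR;
			continue;
		}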

> 
>> +		}
>>  
>> +		/*
>> +		 * Fill in the JPEG blob header.
>> +		 *
>> +		 * The mapped size of the buffer is being returned as
>> +		 * substantially larger than the requested JPEG_MAX_SIZE
>> +		 * (which is referenced from max_jpeg_buffer_size_). Utilise
>> +		 * this static size to ensure the correct offset of the blob is
>> +		 * determined.
>> +		 *
>> +		 * \todo Investigate if the buffer size mismatch is an issue or
>> +		 * expected behaviour.
>> +		 */
>> +		uint8_t *resultPtr = mapped.maps()[0].data() +
>> +				     max_jpeg_buffer_size_ -
>> +				     sizeof(struct camera3_jpeg_blob);
>> +		auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
>> +		blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
>> +		blob->jpeg_size = output.length;
>> +
>> +		/* Update the JPEG result Metadata. */
>> +		resultMetadata->addEntry(ANDROID_JPEG_SIZE,
>> +					 &output.length, 1);
>> +
>> +		const uint32_t jpeg_quality = 95;
>> +		resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
>> +					 &jpeg_quality, 1);
>> +
>> +		const uint32_t jpeg_orientation = 0;
>> +		resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
>> +					 &jpeg_orientation, 1);
>> +	}
>> +
>> +	/* Prepare to call back the Android camera stack. */
>>  	camera3_capture_result_t captureResult = {};
>>  	captureResult.frame_number = descriptor->frameNumber;
>>  	captureResult.num_output_buffers = descriptor->numBuffers;
>> @@ -1298,10 +1506,10 @@ std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number
>>  {
>>  	/*
>>  	 * \todo Keep this in sync with the actual number of entries.
>> -	 * Currently: 12 entries, 36 bytes
>> +	 * Currently: 18 entries, 62 bytes
>>  	 */
>>  	std::unique_ptr<CameraMetadata> resultMetadata =
>> -		std::make_unique<CameraMetadata>(15, 50);
>> +		std::make_unique<CameraMetadata>(18, 62);
>>  	if (!resultMetadata->isValid()) {
>>  		LOG(HAL, Error) << "Failed to allocate static metadata";
>>  		return nullptr;
>> diff --git a/src/android/camera_device.h b/src/android/camera_device.h
>> index 00472c219388..cfec5abeffa1 100644
>> --- a/src/android/camera_device.h
>> +++ b/src/android/camera_device.h
>> @@ -23,15 +23,25 @@
>>  #include "libcamera/internal/log.h"
>>  #include "libcamera/internal/message.h"
>>  
>> +#include "jpeg/compressor_jpeg.h"
>> +
>>  class CameraMetadata;
>>  
>>  struct CameraStream {
> 
> I'd turn this into a class when you get the chance.
> 
>> +	CameraStream(libcamera::PixelFormat, libcamera::Size);
>> +	~CameraStream();
>> +
>>  	/*
>>  	 * The index of the libcamera StreamConfiguration as added during
>>  	 * configureStreams(). A single libcamera Stream may be used to deliver
>>  	 * one or more streams to the Android framework.
>>  	 */
>>  	unsigned int index;
>> +
>> +	libcamera::PixelFormat format;
>> +	libcamera::Size size;
>> +
>> +	CompressorJPEG *jpeg;
> 
> Should this be a pointer to Compressor, not CompressorJPEG ?

Yes, that was the point of Compressor ;-)
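
Right - so for v4 CameraStream will only hold the abstract interface.
As a sketch (the method set here is simply what the HAL calls in this
patch; the actual compressor header may differ):

class Compressor
{
public:
	virtual ~Compressor() = default;

	virtual int configure(const libcamera::StreamConfiguration &cfg) = 0;
	virtual int compress(libcamera::FrameBuffer *input,
			     CompressedImage *output) = 0;
};

struct CameraStream {
	...
	/* Concrete type (CompressorJPEG today) chosen at configure time. */
	Compressor *jpeg;
};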

> 
>>  };
>>  
>>  class CameraDevice : protected libcamera::Loggable
>> @@ -104,6 +114,8 @@ private:
>>  
>>  	int facing_;
>>  	int orientation_;
>> +
>> +	unsigned int max_jpeg_buffer_size_;
> 
> maxJpegBufferSize_ ?

Yes, I'm not sure what naming scheme I was following there...

> 
>>  };
>>  
>>  #endif /* __ANDROID_CAMERA_DEVICE_H__ */
>

Patch

diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
index c529246e115c..433243c3bc56 100644
--- a/src/android/camera_device.cpp
+++ b/src/android/camera_device.cpp
@@ -8,6 +8,7 @@ 
 #include "camera_device.h"
 #include "camera_ops.h"
 
+#include <sys/mman.h>
 #include <tuple>
 #include <vector>
 
@@ -22,6 +23,8 @@ 
 #include "camera_metadata.h"
 #include "system/graphics.h"
 
+#include "jpeg/compressor_jpeg.h"
+
 using namespace libcamera;
 
 namespace {
@@ -129,6 +132,54 @@  const std::map<int, const Camera3Format> camera3FormatsMap = {
 
 LOG_DECLARE_CATEGORY(HAL);
 
+class MappedCamera3Buffer : public MappedBuffer
+{
+public:
+	MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
+};
+
+MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
+					 int flags)
+{
+	maps_.reserve(camera3buffer->numFds);
+	error_ = 0;
+
+	for (int i = 0; i < camera3buffer->numFds; i++) {
+		if (camera3buffer->data[i] == -1)
+			continue;
+
+		off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
+		if (length < 0) {
+			error_ = errno;
+			LOG(HAL, Error) << "Failed to query plane length";
+			break;
+		}
+
+		void *address = mmap(nullptr, length, flags, MAP_SHARED,
+				     camera3buffer->data[i], 0);
+		if (address == MAP_FAILED) {
+			error_ = errno;
+			LOG(HAL, Error) << "Failed to mmap plane";
+			break;
+		}
+
+		maps_.emplace_back(static_cast<uint8_t *>(address),
+				   static_cast<size_t>(length));
+	}
+
+	valid_ = error_ == 0;
+}
+
+CameraStream::CameraStream(PixelFormat f, Size s)
+	: index(-1), format(f), size(s), jpeg(nullptr)
+{
+}
+
+CameraStream::~CameraStream()
+{
+	delete jpeg;
+};
+
 /*
  * \struct Camera3RequestDescriptor
  *
@@ -167,6 +218,12 @@  CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camer
 	  facing_(CAMERA_FACING_FRONT), orientation_(0)
 {
 	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
+
+	/*
+	 * \todo Determine a more accurate value for this during
+	 *  streamConfiguration.
+	 */
+	max_jpeg_buffer_size_ = 13 << 20; /* 13631488 from USB HAL */
 }
 
 CameraDevice::~CameraDevice()
@@ -417,10 +474,10 @@  std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
 {
 	/*
 	 * \todo Keep this in sync with the actual number of entries.
-	 * Currently: 50 entries, 647 bytes of static metadata
+	 * Currently: 51 entries, 667 bytes of static metadata
 	 */
-	uint32_t numEntries = 50;
-	uint32_t byteSize = 651;
+	uint32_t numEntries = 51;
+	uint32_t byteSize = 667;
 
 	/*
 	 * Calculate space occupation in bytes for dynamically built metadata
@@ -576,6 +633,12 @@  const camera_metadata_t *CameraDevice::getStaticMetadata()
 				  availableThumbnailSizes.data(),
 				  availableThumbnailSizes.size());
 
+	/*
+	 * \todo Calculate the maximum JPEG buffer size by asking the compressor
+	 * giving the maximum frame size required.
+	 */
+	staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &max_jpeg_buffer_size_, 1);
+
 	/* Sensor static metadata. */
 	int32_t pixelArraySize[] = {
 		2592, 1944,
@@ -789,6 +852,7 @@  const camera_metadata_t *CameraDevice::getStaticMetadata()
 		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
 		ANDROID_CONTROL_AVAILABLE_MODES,
 		ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+		ANDROID_JPEG_MAX_SIZE,
 		ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
 		ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
 		ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
@@ -855,6 +919,9 @@  const camera_metadata_t *CameraDevice::getStaticMetadata()
 		ANDROID_SENSOR_EXPOSURE_TIME,
 		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
 		ANDROID_STATISTICS_SCENE_FLICKER,
+		ANDROID_JPEG_SIZE,
+		ANDROID_JPEG_QUALITY,
+		ANDROID_JPEG_ORIENTATION,
 	};
 	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
 				  availableResultKeys.data(),
@@ -1016,8 +1083,10 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
 	 */
 	unsigned int streamIndex = 0;
 
+	/* First handle all non-MJPEG streams. */
 	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
 		camera3_stream_t *stream = stream_list->streams[i];
+		Size size(stream->width, stream->height);
 
 		PixelFormat format = toPixelFormat(stream->format);
 
@@ -1031,16 +1100,71 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
 		if (!format.isValid())
 			return -EINVAL;
 
+		streams_.emplace_back(format, size);
+		stream->priv = static_cast<void *>(&streams_[i]);
+
+		/* Defer handling of MJPEG streams until all others are known. */
+		if (format == formats::MJPEG)
+			continue;
+
 		StreamConfiguration streamConfiguration;
 
-		streamConfiguration.size.width = stream->width;
-		streamConfiguration.size.height = stream->height;
+		streamConfiguration.size = size;
 		streamConfiguration.pixelFormat = format;
 
 		config_->addConfiguration(streamConfiguration);
 
 		streams_[i].index = streamIndex++;
-		stream->priv = static_cast<void *>(&streams_[i]);
+	}
+
+	/* Now handle MJPEG streams, adding a new stream if required. */
+	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
+		camera3_stream_t *stream = stream_list->streams[i];
+		bool match = false;
+
+		if (streams_[i].format != formats::MJPEG)
+			continue;
+
+		/* Search for a compatible stream */
+		for (unsigned int j = 0; j < config_->size(); j++) {
+			StreamConfiguration &cfg = config_->at(j);
+
+			/*
+			 * \todo The PixelFormat must also be compatible with
+			 * the encoder.
+			 */
+			if (cfg.size == streams_[i].size) {
+				LOG(HAL, Info) << "Stream " << i
+					       << " using libcamera stream " << j;
+
+				match = true;
+				streams_[i].index = j;
+			}
+		}
+
+		/*
+		 * Without a compatible match for JPEG encoding we must
+		 * introduce a new stream to satisfy the request requirements.
+		 */
+		if (!match) {
+			StreamConfiguration streamConfiguration;
+
+			/*
+			 * \todo: The pixelFormat should be a 'best-fit' choice
+			 * and may require a validation cycle. This is not yet
+			 * handled, and should be considered as part of any
+			 * stream configuration reworks.
+			 */
+			streamConfiguration.size.width = stream->width;
+			streamConfiguration.size.height = stream->height;
+			streamConfiguration.pixelFormat = formats::NV12;
+
+			LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
+				       << " for MJPEG support";
+
+			config_->addConfiguration(streamConfiguration);
+			streams_[i].index = streamIndex++;
+		}
 	}
 
 	switch (config_->validate()) {
@@ -1067,6 +1191,18 @@  int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
 
 		/* Use the bufferCount confirmed by the validation process. */
 		stream->max_buffers = cfg.bufferCount;
+
+		/*
+		 * Construct a software compressor for MJPEG streams from the
+		 * chosen libcamera source stream.
+		 */
+		if (cameraStream->format == formats::MJPEG) {
+			cameraStream->jpeg = new CompressorJPEG();
+			cameraStream->jpeg->configure(cfg);
+		} else {
+			/* Either construct this correctly, or use a better interface */
+			cameraStream->jpeg = nullptr;
+		}
 	}
 
 	/*
@@ -1161,6 +1297,10 @@  int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
 		descriptor->buffers[i].stream = camera3Buffers[i].stream;
 		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
 
+		/* Software streams are handled after hardware streams complete. */
+		if (cameraStream->format == formats::MJPEG)
+			continue;
+
 		/*
 		 * Create a libcamera buffer using the dmabuf descriptors of
 		 * the camera3Buffer for each stream. The FrameBuffer is
@@ -1217,8 +1357,76 @@  void CameraDevice::requestComplete(Request *request)
 	resultMetadata = getResultMetadata(descriptor->frameNumber,
 					   buffer->metadata().timestamp);
 
-	/* Prepare to call back the Android camera stack. */
+	/* Handle any JPEG compression */
+	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
+		CameraStream *cameraStream =
+			static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
+
+		if (cameraStream->format != formats::MJPEG)
+			continue;
+
+		Compressor *compressor = cameraStream->jpeg;
+		/* Only handle streams with compression enabled. */
+		if (!compressor)
+			continue;
+
+		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index);
+		Stream *stream = streamConfiguration->stream();
+		FrameBuffer *buffer = request->findBuffer(stream);
+		if (!buffer) {
+			LOG(HAL, Error) << "Failed to find a source stream buffer";
+			continue;
+		}
+
+		MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer, PROT_READ | PROT_WRITE);
+		if (!mapped.isValid()) {
+			LOG(HAL, Error) << "Failed to mmap android blob buffer";
+			continue;
+		}
+
+		CompressedImage output;
+		output.data = static_cast<unsigned char *>(mapped.maps()[0].data());
+		output.length = mapped.maps()[0].size();
+
+		int ret = compressor->compress(buffer, &output);
+		if (ret) {
+			LOG(HAL, Error) << "Failed to compress stream image";
+			status = CAMERA3_BUFFER_STATUS_ERROR;
+		}
 
+		/*
+		 * Fill in the JPEG blob header.
+		 *
+		 * The mapped size of the buffer is being returned as
+		 * substantially larger than the requested JPEG_MAX_SIZE
+		 * (which is referenced from max_jpeg_buffer_size_). Utilise
+		 * this static size to ensure the correct offset of the blob is
+		 * determined.
+		 *
+		 * \todo Investigate if the buffer size mismatch is an issue or
+		 * expected behaviour.
+		 */
+		uint8_t *resultPtr = mapped.maps()[0].data() +
+				     max_jpeg_buffer_size_ -
+				     sizeof(struct camera3_jpeg_blob);
+		auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
+		blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+		blob->jpeg_size = output.length;
+
+		/* Update the JPEG result Metadata. */
+		resultMetadata->addEntry(ANDROID_JPEG_SIZE,
+					 &output.length, 1);
+
+		const uint32_t jpeg_quality = 95;
+		resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
+					 &jpeg_quality, 1);
+
+		const uint32_t jpeg_orientation = 0;
+		resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
+					 &jpeg_orientation, 1);
+	}
+
+	/* Prepare to call back the Android camera stack. */
 	camera3_capture_result_t captureResult = {};
 	captureResult.frame_number = descriptor->frameNumber;
 	captureResult.num_output_buffers = descriptor->numBuffers;
@@ -1298,10 +1506,10 @@  std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number
 {
 	/*
 	 * \todo Keep this in sync with the actual number of entries.
-	 * Currently: 12 entries, 36 bytes
+	 * Currently: 18 entries, 62 bytes
 	 */
 	std::unique_ptr<CameraMetadata> resultMetadata =
-		std::make_unique<CameraMetadata>(15, 50);
+		std::make_unique<CameraMetadata>(18, 62);
 	if (!resultMetadata->isValid()) {
 		LOG(HAL, Error) << "Failed to allocate static metadata";
 		return nullptr;
diff --git a/src/android/camera_device.h b/src/android/camera_device.h
index 00472c219388..cfec5abeffa1 100644
--- a/src/android/camera_device.h
+++ b/src/android/camera_device.h
@@ -23,15 +23,25 @@ 
 #include "libcamera/internal/log.h"
 #include "libcamera/internal/message.h"
 
+#include "jpeg/compressor_jpeg.h"
+
 class CameraMetadata;
 
 struct CameraStream {
+	CameraStream(libcamera::PixelFormat, libcamera::Size);
+	~CameraStream();
+
 	/*
 	 * The index of the libcamera StreamConfiguration as added during
 	 * configureStreams(). A single libcamera Stream may be used to deliver
 	 * one or more streams to the Android framework.
 	 */
 	unsigned int index;
+
+	libcamera::PixelFormat format;
+	libcamera::Size size;
+
+	CompressorJPEG *jpeg;
 };
 
 class CameraDevice : protected libcamera::Loggable
@@ -104,6 +114,8 @@  private:
 
 	int facing_;
 	int orientation_;
+
+	unsigned int max_jpeg_buffer_size_;
 };
 
 #endif /* __ANDROID_CAMERA_DEVICE_H__ */