| Field | Value |
|---|---|
| Message ID | 20210422094102.371772-8-paul.elder@ideasonboard.com |
| State | Superseded |
| Delegated to | Paul Elder |
Hi Paul,

Thank you for the patch.

On Thu, Apr 22, 2021 at 06:40:57PM +0900, Paul Elder wrote:
> Set the result metadata to satisfy FULL hardware level requirements.
> Also add the new result metadata tags to the static metadata.
>
> This fixes many tests under:
> - android.hardware.camera2.cts.CaptureRequestTest
>   - testAeModeAndLock
>   - testAntiBandingModes
>   - testAwbModeAndLock
>   - testBlackLevelLock
>   - testEdgeModeControlFastFps
>   - testNoiseReductionModeControlFastFps
>   - testToneMapControl
> - android.hardware.camera2.cts.CaptureResultTest
>   - testCameraCaptureResultAllKeys
> - android.hardware.camera2.cts.ImageReaderTest
>   - testDiscardFreeBuffers
>   - testFlexibleYuv
>   - testJpeg
>   - testLongProcessingRepeatingFlexibleYuv
>   - testRepeatingJpeg
> - android.hardware.camera2.cts.StaticMetadataTest
>   - testCapabilities

As for patch 05/12, we need to plumb to the pipeline handler anything
that we don't expect all cameras to support.

> Signed-off-by: Paul Elder <paul.elder@ideasonboard.com>
>
> ---
> Again, not sure if the entries in the results key list needs to be added
> to the static metadata allocation size.
> ---
>  src/android/camera_device.cpp | 127 ++++++++++++++++++++++++++++++----
>  src/android/camera_device.h   |   2 +
>  2 files changed, 117 insertions(+), 12 deletions(-)
>
> diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
> index c9d4afc3..99c67555 100644
> --- a/src/android/camera_device.cpp
> +++ b/src/android/camera_device.cpp
> @@ -402,7 +402,7 @@ CameraDevice::Camera3RequestDescriptor::Camera3RequestDescriptor(
>
>  CameraDevice::CameraDevice(unsigned int id, std::shared_ptr<Camera> camera)
>          : id_(id), running_(false), camera_(std::move(camera)),
> -          facing_(CAMERA_FACING_FRONT), orientation_(0)
> +          facing_(CAMERA_FACING_FRONT), orientation_(0), lastTimestamp_(0)
>  {
>          camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
>
> @@ -776,10 +776,10 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
>  {
>          /*
>           * \todo Keep this in sync with the actual number of entries.
> -         * Currently: 63 entries, 990 bytes of static metadata
> +         * Currently: 63 entries, 1014 bytes of static metadata
>           */
>          uint32_t numEntries = 63;
> -        uint32_t byteSize = 990;
> +        uint32_t byteSize = 1014;
>
>          // do i need to add for entries in the available keys?
>          // +1, +4 for EDGE_AVAILABLE_EDGE_MODES
> @@ -787,6 +787,7 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
>          // +1, +4 for BLACK_LEVEL_PATTERN
>          // +1, +4 for TONEMAP_AVAILABLE_TONE_MAP_MODES
>          // +1, +4 for TONEMAP_MAX_CURVE_POINTS
> +        // +4x9 = 36 for the new result tags
>
>          // +36 for new request keys
>
> @@ -1419,8 +1420,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>          /*
>           * required for FULL
>           * \todo get from camera (camCapabilities[camId]->tonemapCurvePoints?)
> +         * at least 64
>           */
> -        int32_t tonemapCurvePoints = 0;
> +        int32_t tonemapCurvePoints = 64;
>          staticMetadata_->addEntry(ANDROID_TONEMAP_MAX_CURVE_POINTS,
>                          &tonemapCurvePoints, 1);
>
> @@ -1526,6 +1528,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>                  ANDROID_SHADING_MODE,
>                  ANDROID_STATISTICS_FACE_DETECT_MODE,
>                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
> +                ANDROID_TONEMAP_CURVE_BLUE,
> +                ANDROID_TONEMAP_CURVE_GREEN,
> +                ANDROID_TONEMAP_CURVE_RED,
>                  ANDROID_TONEMAP_MODE,
>          };
>          staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
> @@ -1533,6 +1538,7 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>                          availableRequestKeys.size());
>
>          std::vector<int32_t> availableResultKeys = {
> +                ANDROID_BLACK_LEVEL_LOCK,
>                  ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
>                  ANDROID_CONTROL_AE_ANTIBANDING_MODE,
>                  ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> @@ -1552,8 +1558,10 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>                  ANDROID_CONTROL_MODE,
>                  ANDROID_CONTROL_SCENE_MODE,
>                  ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
> +                ANDROID_EDGE_MODE,
>                  ANDROID_FLASH_MODE,
>                  ANDROID_FLASH_STATE,
> +                ANDROID_HOT_PIXEL_MODE,
>                  ANDROID_JPEG_GPS_COORDINATES,
>                  ANDROID_JPEG_GPS_PROCESSING_METHOD,
>                  ANDROID_JPEG_GPS_TIMESTAMP,
> @@ -1563,20 +1571,30 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
>                  ANDROID_JPEG_THUMBNAIL_QUALITY,
>                  ANDROID_JPEG_THUMBNAIL_SIZE,
>                  ANDROID_LENS_APERTURE,
> +                ANDROID_LENS_FILTER_DENSITY,
>                  ANDROID_LENS_FOCAL_LENGTH,
> +                ANDROID_LENS_FOCUS_DISTANCE,
> +                ANDROID_LENS_FOCUS_RANGE, // undocumented
>                  ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
>                  ANDROID_LENS_STATE,
>                  ANDROID_NOISE_REDUCTION_MODE,
>                  ANDROID_REQUEST_PIPELINE_DEPTH,
>                  ANDROID_SCALER_CROP_REGION,
>                  ANDROID_SENSOR_EXPOSURE_TIME,
> +                ANDROID_SENSOR_FRAME_DURATION,
>                  ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
> +                ANDROID_SENSOR_SENSITIVITY,
>                  ANDROID_SENSOR_TEST_PATTERN_MODE,
>                  ANDROID_SENSOR_TIMESTAMP,
> +                ANDROID_SHADING_MODE,
>                  ANDROID_STATISTICS_FACE_DETECT_MODE,
>                  ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
>                  ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
>                  ANDROID_STATISTICS_SCENE_FLICKER,
> +                ANDROID_TONEMAP_CURVE_BLUE,
> +                ANDROID_TONEMAP_CURVE_GREEN,
> +                ANDROID_TONEMAP_CURVE_RED,
> +                ANDROID_TONEMAP_MODE,
>          };
>          staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
>                          availableResultKeys.data(),
> @@ -2277,6 +2295,10 @@ void CameraDevice::requestComplete(Request *request)
>
>          resultMetadata = getResultMetadata(descriptor);
>
> +        const ControlList &metadata = descriptor->request_->metadata();
> +        if (metadata.contains(controls::SensorTimestamp))
> +                lastTimestamp_ = metadata.get(controls::SensorTimestamp);
> +
>          /* Handle any JPEG compression. */
>          for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
>                  CameraStream *cameraStream =
> @@ -2416,7 +2438,7 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>           * Total bytes for JPEG metadata: 82
>           */
>          std::unique_ptr<CameraMetadata> resultMetadata =
> -                std::make_unique<CameraMetadata>(44, 166);
> +                std::make_unique<CameraMetadata>(57, 303);
>          if (!resultMetadata->isValid()) {
>                  LOG(HAL, Error) << "Failed to allocate result metadata";
>                  return nullptr;
> @@ -2428,6 +2450,11 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>           * from libcamera::Request::metadata.
>           */
>
> +        /* FULL */
> +        found = settings.getEntry(ANDROID_BLACK_LEVEL_LOCK, &entry);
> +        bool valueBool = found ? *entry.data.u8 : false;
> +        resultMetadata->addEntry(ANDROID_BLACK_LEVEL_LOCK, &valueBool, 1);
> +
>          uint8_t value = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
>          resultMetadata->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
>                          &value, 1);
> @@ -2439,8 +2466,11 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>          resultMetadata->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
>                          &value32, 1);
>
> +        /* \todo apply this */
>          value = ANDROID_CONTROL_AE_LOCK_OFF;
> -        resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, &value, 1);
> +        found = settings.getEntry(ANDROID_CONTROL_AE_LOCK, &entry);
> +        resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK,
> +                        found ? entry.data.u8 : &value, 1);
>
>          value = ANDROID_CONTROL_AE_MODE_ON;
>          resultMetadata->addEntry(ANDROID_CONTROL_AE_MODE, &value, 1);
> @@ -2472,12 +2502,16 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>          resultMetadata->addEntry(ANDROID_CONTROL_AF_TRIGGER, &value, 1);
>
>          value = ANDROID_CONTROL_AWB_MODE_AUTO;
> -        resultMetadata->addEntry(ANDROID_CONTROL_AWB_MODE, &value, 1);
> +        found = settings.getEntry(ANDROID_CONTROL_AWB_MODE, &entry);
> +        resultMetadata->addEntry(ANDROID_CONTROL_AWB_MODE,
> +                        found ? entry.data.u8 : &value, 1);
>
> -        value = ANDROID_CONTROL_AWB_LOCK_OFF;
> +        found = settings.getEntry(ANDROID_CONTROL_AWB_LOCK, &entry);
> +        value = found ? *entry.data.u8 : ANDROID_CONTROL_AWB_LOCK_OFF;
>          resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, &value, 1);
>
> -        value = ANDROID_CONTROL_AWB_STATE_CONVERGED;
> +        value = value ? ANDROID_CONTROL_AWB_STATE_LOCKED :
> +                        ANDROID_CONTROL_AWB_STATE_CONVERGED;
>          resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, &value, 1);
>
>          value = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
> @@ -2495,18 +2529,37 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>          value = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
>          resultMetadata->addEntry(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &value, 1);
>
> +        found = settings.getEntry(ANDROID_EDGE_MODE, &entry);
> +        value = found ? *entry.data.u8 : ANDROID_EDGE_MODE_OFF;
> +        resultMetadata->addEntry(ANDROID_EDGE_MODE, &value, 1);
> +
>          value = ANDROID_FLASH_MODE_OFF;
>          resultMetadata->addEntry(ANDROID_FLASH_MODE, &value, 1);
>
>          value = ANDROID_FLASH_STATE_UNAVAILABLE;
>          resultMetadata->addEntry(ANDROID_FLASH_STATE, &value, 1);
>
> +        value = ANDROID_HOT_PIXEL_MODE_OFF;
> +        resultMetadata->addEntry(ANDROID_HOT_PIXEL_MODE, &value, 1);
> +
>          if (settings.getEntry(ANDROID_LENS_APERTURE, &entry))
>                  resultMetadata->addEntry(ANDROID_LENS_APERTURE, entry.data.f, 1);
>
> +        float filterDensity = 0.0f;
> +        resultMetadata->addEntry(ANDROID_LENS_FILTER_DENSITY,
> +                        &filterDensity, 1);
> +
>          float focal_length = 1.0;
>          resultMetadata->addEntry(ANDROID_LENS_FOCAL_LENGTH, &focal_length, 1);
>
> +        float focusDistance = 0.0f;
> +        resultMetadata->addEntry(ANDROID_LENS_FOCUS_DISTANCE,
> +                        &focusDistance, 1);
> +
> +        /* there's no documentation on this */
> +        float focusRange[] = { 0.0f, 1.0f };
> +        resultMetadata->addEntry(ANDROID_LENS_FOCUS_RANGE, &focusRange, 2);
> +
>          value = ANDROID_LENS_STATE_STATIONARY;
>          resultMetadata->addEntry(ANDROID_LENS_STATE, &value, 1);
>
> @@ -2518,6 +2571,11 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>          resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE,
>                          &value32, 1);
>
> +
> +        /* \todo get this from request? and set it. handle map mode too */
> +        value = ANDROID_SHADING_MODE_OFF;
> +        resultMetadata->addEntry(ANDROID_SHADING_MODE, &value, 1);
> +
>          value = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
>          resultMetadata->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
>                          &value, 1);
> @@ -2534,14 +2592,48 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>          resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER,
>                          &value, 1);
>
> +        /* \todo handle this */
> +        found = settings.getEntry(ANDROID_TONEMAP_MODE, &entry);
> +        value = found ? *entry.data.u8 : ANDROID_TONEMAP_MODE_FAST;
> +        resultMetadata->addEntry(ANDROID_TONEMAP_MODE, &value, 1);
> +
>          value = ANDROID_NOISE_REDUCTION_MODE_OFF;
> -        resultMetadata->addEntry(ANDROID_NOISE_REDUCTION_MODE, &value, 1);
> +        found = settings.getEntry(ANDROID_NOISE_REDUCTION_MODE, &entry);
> +        resultMetadata->addEntry(ANDROID_NOISE_REDUCTION_MODE,
> +                        found ? entry.data.u8 : &value, 1);
>
>          /* 33.3 msec */
>          const int64_t rolling_shutter_skew = 33300000;
>          resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
>                          &rolling_shutter_skew, 1);
>
> +        /*
> +         * CTS doesn't actually check if this is equal to the requested
> +         * tonemap, so just set a linear tonemap
> +         * it's in [in, out] [in, out] format
> +         */
> +        std::vector<float> tonemap = {
> +                0.0, 0.0,
> +                1.0, 1.0,
> +        };
> +        resultMetadata->addEntry(ANDROID_TONEMAP_CURVE_BLUE,
> +                        tonemap.data(), tonemap.size());
> +        resultMetadata->addEntry(ANDROID_TONEMAP_CURVE_GREEN,
> +                        tonemap.data(), tonemap.size());
> +        resultMetadata->addEntry(ANDROID_TONEMAP_CURVE_RED,
> +                        tonemap.data(), tonemap.size());
> +
> +        /*
> +         * \todo get the tonemap gamma and tonemap preset curve from request
> +         * and copy to result
> +         */
> +
> +        /* \todo get this from camera */
> +        value32 = 32;
> +        found = settings.getEntry(ANDROID_SENSOR_SENSITIVITY, &entry);
> +        resultMetadata->addEntry(ANDROID_SENSOR_SENSITIVITY,
> +                        found ? entry.data.i32 : &value32, 1);
> +
>          /* Add metadata tags reported by libcamera. */
>          if (metadata.contains(controls::draft::PipelineDepth)) {
>                  uint8_t pipeline_depth =
> @@ -2550,15 +2642,26 @@ CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) cons
>                                  &pipeline_depth, 1);
>          }
>
> -        if (metadata.contains(controls::ExposureTime)) {
> +        found = settings.getEntry(ANDROID_SENSOR_EXPOSURE_TIME, &entry);
> +        if (found || metadata.contains(controls::ExposureTime)) {
>                  int64_t exposure = metadata.get(controls::ExposureTime) * 1000ULL;
>                  resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME,
> -                                &exposure, 1);
> +                                found ? entry.data.i64 : &exposure, 1);
>          }
>
>          if (metadata.contains(controls::SensorTimestamp)) {
>                  int64_t timestamp = metadata.get(controls::SensorTimestamp);
>                  resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
> +
> +                int64_t frameDuration = timestamp - lastTimestamp_;
> +                /*
> +                 * frame duration should be at last as long as the requested
> +                 * exposure time, hardcode it for now
> +                 */
> +                if (found && frameDuration < *entry.data.i64)
> +                        frameDuration = *entry.data.i64;
> +                resultMetadata->addEntry(ANDROID_SENSOR_FRAME_DURATION,
> +                                &frameDuration, 1);

This should be the nominal frame duration, not the calculated frame
duration. Haven't we discussed this previously?

>          }
>
>          if (metadata.contains(controls::ScalerCrop)) {
> diff --git a/src/android/camera_device.h b/src/android/camera_device.h
> index 8edbcdfd..fcd57fcd 100644
> --- a/src/android/camera_device.h
> +++ b/src/android/camera_device.h
> @@ -139,6 +139,8 @@ private:
>
>          unsigned int maxJpegBufferSize_;
>
> +        int64_t lastTimestamp_;
> +
>          CameraMetadata lastSettings_;
>  };
>
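The request to plumb support through the pipeline handler applies to most of the tags this patch advertises unconditionally. A minimal sketch of that idea, assuming the HAL can consult libcamera's Camera::controls() before adding a key; the helper and the control-to-tag pairings below are illustrative assumptions, not the series' actual implementation:

```cpp
#include <vector>

#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>

#include <system/camera_metadata_tags.h>

/*
 * Sketch only: advertise an Android result key only when the camera
 * reports a libcamera control that can back it. The pairings below are
 * examples, not a complete or authoritative mapping.
 */
static void addSupportedResultKeys(const libcamera::Camera &camera,
                                   std::vector<int32_t> &resultKeys)
{
        const libcamera::ControlInfoMap &controls = camera.controls();

        if (controls.count(&libcamera::controls::ExposureTime))
                resultKeys.push_back(ANDROID_SENSOR_EXPOSURE_TIME);

        if (controls.count(&libcamera::controls::AnalogueGain))
                resultKeys.push_back(ANDROID_SENSOR_SENSITIVITY);
}
```

Keys that are satisfied by fixed defaults (flash state, hot pixel mode and the like) can stay unconditional; the gating matters for the FULL-level tags that need real data from the pipeline.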
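On the frame duration comment: a nominal duration comes from the sensor timing the pipeline is configured for, whereas the patch derives it from consecutive timestamps, which drifts with scheduling jitter and jumps across dropped frames. A sketch of the distinction, using a hypothetical fixed 30 fps configuration in place of whatever the pipeline handler would actually report:

```cpp
#include <cstdint>

/* Nominal duration: what the sensor is programmed to deliver. */
static int64_t nominalFrameDuration(double framesPerSecond)
{
        return static_cast<int64_t>(1e9 / framesPerSecond); /* nanoseconds */
}

/*
 * Calculated duration: what the patch currently reports, which varies
 * from frame to frame and inflates whenever a frame is dropped.
 */
static int64_t calculatedFrameDuration(int64_t timestamp, int64_t lastTimestamp)
{
        return timestamp - lastTimestamp;
}
```

With the 30 fps assumption, nominalFrameDuration(30.0) is 33333333 ns, in line with the 33.3 ms rolling shutter skew already hardcoded in getResultMetadata().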
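The patch advertises ANDROID_TONEMAP_MAX_CURVE_POINTS = 64 yet reports a two-point identity curve, which CTS accepts. Should a denser curve ever be needed, the interleaved [in, out] layout extends directly; a sketch with a hypothetical helper:

```cpp
#include <cstddef>
#include <vector>

/*
 * Build a linear tonemap curve with `points` control points (points >= 2),
 * stored as interleaved [in, out] pairs as the TONEMAP_CURVE_* tags expect.
 */
static std::vector<float> linearTonemapCurve(std::size_t points)
{
        std::vector<float> curve;
        curve.reserve(points * 2);

        for (std::size_t i = 0; i < points; ++i) {
                float v = static_cast<float>(i) / (points - 1);
                curve.push_back(v); /* input */
                curve.push_back(v); /* output: identity mapping */
        }

        return curve;
}

/*
 * Usage sketch: pass curve.data() and curve.size() to
 * resultMetadata->addEntry(ANDROID_TONEMAP_CURVE_RED, ...), and likewise
 * for the green and blue channels.
 */
```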
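As a reading aid, most of the new result entries in getResultMetadata() follow one pattern: echo the value from the request settings when present, otherwise report a fixed default. Factored out, it looks roughly like this (hypothetical helper, with the getEntry()/addEntry() signatures assumed from how the diff uses them):

```cpp
#include <cstdint>

#include "camera_metadata.h"

/*
 * Sketch of the recurring pattern: copy a byte-sized tag from the request
 * settings into the result metadata, or report a default if the request
 * did not set it. The patch open-codes this for AE lock, AWB mode/lock,
 * edge mode, noise reduction, tonemap mode and sensitivity.
 */
static void echoOrDefaultU8(const CameraMetadata &settings,
                            CameraMetadata &result,
                            uint32_t tag, uint8_t defaultValue)
{
        camera_metadata_ro_entry_t entry;

        uint8_t value = settings.getEntry(tag, &entry) ? *entry.data.u8
                                                       : defaultValue;
        result.addEntry(tag, &value, 1);
}
```

The AWB lock/state pair is the one place the pattern does not apply directly, since the reported AWB state is derived from the lock value.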