@@ -6,6 +6,7 @@
*/
#include <algorithm>
+#include <deque>
#include <fstream>
#include <iomanip>
#include <math.h>
@@ -47,6 +48,17 @@ struct UVCTimingBuf {
__u16 sofDevice;
} __attribute__((packed));
+/* Raw timestamp input to the timestamp calculation function. */
+struct UVCTimestampData {
+ unsigned long long tsHost; /* System clock timestamp in nanoseconds */
+ unsigned short sofHost; /* USB SOF counter value when tsHost was taken */
+ unsigned int stcDevice; /* The UVC device source time clock (STC) */
+ unsigned short sofDevice; /* USB SOF counter value when the STC was sampled */
+
+ /* Presentation timestamp (PTS) to be converted into a system clock timestamp */
+ unsigned int ptsDevice;
+};
+
class UVCCameraData : public Camera::Private
{
public:
@@ -72,15 +84,18 @@ public:
std::map<PixelFormat, std::vector<SizeRange>> formats_;
std::queue<FrameBuffer *> pendingVideoBuffers_;
- std::queue<std::pair<unsigned int, uint64_t>> pendingMetadata_;
+ std::queue<unsigned int> pendingMetadata_;
private:
int initMetadata(MediaDevice *media);
void completeRequest(FrameBuffer *buffer, uint64_t timestamp);
void endCorruptedStream();
+ void addTimestampData(const uvc_meta_buf &rawMetadata, const UVCTimingBuf &packed);
+ std::deque<UVCTimestampData> timeSamples_;
const unsigned int frameStart_ = 1;
const unsigned int maxVidBuffersInQueue_ = 1;
+ const unsigned int bufferRingSize_ = 32;
bool generateId();
@@ -899,6 +914,13 @@ void UVCCameraData::endCorruptedStream()
<< "UVC metadata stream corrupted. Reverting to driver timestamps.";
}
+unsigned long long calculateTimestamp([[maybe_unused]] const UVCTimestampData &p1,
+ const UVCTimestampData &p2,
+ [[maybe_unused]] const unsigned int pts)
+{
+ return p2.tsHost; /* Placeholder until the SOF/STC-based conversion is implemented */
+}
+
/*
* If there is a metadata buffer that hasn't been matched with a
* video buffer, check to see if it matches this video buffer.
@@ -929,9 +951,17 @@ void UVCCameraData::bufferReady(FrameBuffer *buffer)
if (!pendingMetadata_.empty()) {
/* A metadata buffer was ready first. */
- unsigned int mdSequence = std::get<0>(pendingMetadata_.front()) + frameStart_;
+ unsigned int mdSequence = pendingMetadata_.front() + frameStart_;
if (mdSequence == buffer->metadata().sequence) {
- completeRequest(buffer, std::get<1>(pendingMetadata_.front()));
+ unsigned long long timestamp;
+ if (timeSamples_.size() > 1) {
+ timestamp = calculateTimestamp(timeSamples_.front(),
+ timeSamples_.back(),
+ timeSamples_.back().ptsDevice);
+ } else {
+ timestamp = buffer->metadata().timestamp;
+ }
+ completeRequest(buffer, timestamp);
pendingMetadata_.pop();
return;
} else {
@@ -951,6 +981,28 @@ void UVCCameraData::bufferReady(FrameBuffer *buffer)
}
}
+void UVCCameraData::addTimestampData(const uvc_meta_buf &rawMetadata, const UVCTimingBuf &packed)
+{
+ /*
+ * Unpack the timing fields from the raw metadata buffer into a
+ * UVCTimestampData sample and append it to the circular queue,
+ * dropping the oldest sample once the queue holds
+ * bufferRingSize_ entries.
+ */
+ UVCTimestampData data;
+ data.ptsDevice = packed.pts;
+ data.sofDevice = packed.sofDevice;
+ data.stcDevice = packed.stc;
+ data.sofHost = rawMetadata.sof;
+ data.tsHost = rawMetadata.ns;
+
+ if (timeSamples_.size() == bufferRingSize_) {
+ timeSamples_.pop_front();
+ }
+
+ timeSamples_.push_back(data);
+}
+
void UVCCameraData::bufferReadyMetadata(FrameBuffer *buffer)
{
if (!metadata_ ||
@@ -971,8 +1023,8 @@ void UVCCameraData::bufferReadyMetadata(FrameBuffer *buffer)
Span<uint8_t> memMeta = mappedMetadataBuffers_.at(pos).planes()[0];
uvc_meta_buf *metaBuf = reinterpret_cast<uvc_meta_buf *>(memMeta.data());
- //Span<uint8_t> memTime = mappedMetadataBuffers_.at(pos).planes()[0];
- //UVCTimingBuf * timeBuf = reinterpret_cast<UVCTimingBuf *>(&memTime.data()[sizeof(uvc_meta_buf)]);
+ Span<uint8_t> memTime = mappedMetadataBuffers_.at(pos).planes()[0];
+ UVCTimingBuf *timeBuf = reinterpret_cast<UVCTimingBuf *>(&memTime.data()[sizeof(uvc_meta_buf)]);
size_t UVCPayloadHeaderSize = sizeof(metaBuf->length) +
sizeof(metaBuf->flags) + sizeof(UVCTimingBuf);
@@ -981,6 +1033,8 @@ void UVCCameraData::bufferReadyMetadata(FrameBuffer *buffer)
return;
}
+ addTimestampData(*metaBuf, *timeBuf);
+
/*
* Match a pending video buffer with this buffer's sequence. If
* there is none available, put this timestamp information on the
@@ -992,15 +1046,19 @@ void UVCCameraData::bufferReadyMetadata(FrameBuffer *buffer)
unsigned int vidSequence = vidBuffer->metadata().sequence;
if (vidSequence == mdSequence) {
- completeRequest(vidBuffer, static_cast<uint64_t>(metaBuf->ns));
+ unsigned long long timestamp = vidBuffer->metadata().timestamp;
+ if (timeSamples_.size() > 1) {
+ timestamp = calculateTimestamp(timeSamples_.front(),
+ timeSamples_.back(),
+ timeSamples_.back().ptsDevice);
+ }
+ completeRequest(vidBuffer, timestamp);
pendingVideoBuffers_.pop();
} else {
endCorruptedStream();
}
} else {
- pendingMetadata_.push(
- std::make_pair(buffer->metadata().sequence,
- static_cast<uint64_t>(metaBuf->ns)));
+ pendingMetadata_.push(buffer->metadata().sequence);
}
metadata_->queueBuffer(buffer);
}
Parse the raw timing data out of the metadata packets and store each
sample in a circular buffer of size 32. To convert the latest frame's
metadata into a system clock timestamp, use the metadata time
information from 32 frames back, stored at the front of the circular
buffer.

For every metadata buffer that comes in, store the timing information
in the circular queue. The information at the head of the queue may be
used immediately when a corresponding video buffer has already arrived,
or later when the video buffer eventually does arrive. In either case,
the information at the head of the queue is used when the timestamp is
set in the request metadata for the video buffer. If only one frame of
metadata has been received, use the default (video buffer-provided)
timestamp. If the circular buffer has not yet been filled with 32
frames, use the oldest available sample. When the circular buffer
reaches 32 frames, remove the oldest item on each new insertion.

This approach keeps the metadata sample that will be used to update the
video buffer's request timestamp synchronized with the timing data
itself, without extra checks to enforce that synchronization. It also
provides easy and efficient access to both the front and the back of
the circular buffer.

Signed-off-by: Gabby George <gabbymg94@gmail.com>
---
 src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 76 +++++++++++++++++---
 1 file changed, 67 insertions(+), 9 deletions(-)
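
For reference, below is a minimal standalone sketch of the ring-buffer
bookkeeping this series introduces. The names TimestampRing,
TimestampSample and kRingSize are illustrative only and do not appear in
the patch; calculate() mirrors the current calculateTimestamp()
placeholder, which simply returns the newest host timestamp until the
SOF/STC-based conversion is implemented.

#include <cstddef>
#include <deque>
#include <iostream>

/* Mirrors the fields of UVCTimestampData from the patch. */
struct TimestampSample {
	unsigned long long tsHost;  /* Host clock timestamp in nanoseconds */
	unsigned short sofHost;     /* USB SOF counter value when tsHost was taken */
	unsigned int stcDevice;     /* Device source time clock (STC) */
	unsigned short sofDevice;   /* USB SOF counter value when the STC was sampled */
	unsigned int ptsDevice;     /* Device presentation timestamp (PTS) */
};

class TimestampRing
{
public:
	/* Append a sample, dropping the oldest once the ring is full. */
	void push(const TimestampSample &sample)
	{
		if (samples_.size() == kRingSize)
			samples_.pop_front();
		samples_.push_back(sample);
	}

	/*
	 * Convert the newest frame's PTS to a host timestamp. With fewer
	 * than two samples, fall back to the driver-provided timestamp,
	 * matching the behaviour of bufferReady() in the patch.
	 */
	unsigned long long timestamp(unsigned long long driverFallback) const
	{
		if (samples_.size() < 2)
			return driverFallback;
		return calculate(samples_.front(), samples_.back(),
				 samples_.back().ptsDevice);
	}

private:
	/* Placeholder conversion, as in the patch: newest host timestamp. */
	static unsigned long long calculate(const TimestampSample &oldest,
					    const TimestampSample &newest,
					    unsigned int pts)
	{
		(void)oldest;
		(void)pts;
		return newest.tsHost;
	}

	static constexpr std::size_t kRingSize = 32;
	std::deque<TimestampSample> samples_;
};

int main()
{
	TimestampRing ring;

	/* Feed two fabricated samples, then convert the newest frame. */
	ring.push({ 1'000'000ULL, 100, 0x1000, 100, 0x0800 });
	ring.push({ 2'000'000ULL, 108, 0x2000, 108, 0x1800 });

	std::cout << ring.timestamp(/* driverFallback */ 0) << " ns\n";
	return 0;
}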