diff --git a/include/libcamera/internal/v4l2_videodevice.h b/include/libcamera/internal/v4l2_videodevice.h
index 57db0036db..82d98184ed 100644
--- a/include/libcamera/internal/v4l2_videodevice.h
+++ b/include/libcamera/internal/v4l2_videodevice.h
@@ -208,6 +208,8 @@ public:
 	int setFormat(V4L2DeviceFormat *format);
 	Formats formats(uint32_t code = 0);
 
+	int getFrameInterval(std::chrono::microseconds *interval);
+
 	int getSelection(unsigned int target, Rectangle *rect);
 	int setSelection(unsigned int target, Rectangle *rect);
 
diff --git a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
index cb8cc82dff..3f98e8ece0 100644
--- a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
+++ b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
@@ -60,6 +60,8 @@ public:
 
 	std::optional<v4l2_exposure_auto_type> autoExposureMode_;
 	std::optional<v4l2_exposure_auto_type> manualExposureMode_;
+	/* Frame duration reported through request metadata, when known. */
+	std::optional<std::chrono::microseconds> timePerFrame_;
 
 private:
 	bool generateId();
@@ -295,6 +297,9 @@ int PipelineHandlerUVC::start(Camera *camera, const ControlList *controls)
 	UVCCameraData *data = cameraData(camera);
 	unsigned int count = data->stream_.configuration().bufferCount;
 
+	/* Discard any frame duration left over from a previous session. */
+	data->timePerFrame_.reset();
+
 	int ret = data->video_->importBuffers(count);
 	if (ret < 0)
 		return ret;
@@ -309,6 +314,17 @@ int PipelineHandlerUVC::start(Camera *camera, const ControlList *controls)
 	if (ret < 0)
 		goto err_release_buffers;
 
+	/*
+	 * If timePerFrame_ hasn't been set by this point, fall back to the
+	 * frame interval currently applied on the video device. Best-effort:
+	 * ignore failures and zero intervals rather than failing start().
+	 */
+	if (!data->timePerFrame_) {
+		std::chrono::microseconds interval;
+		ret = data->video_->getFrameInterval(&interval);
+		if (ret == 0 && interval.count() > 0)
+			data->timePerFrame_ = interval;
+	}
+
 	return 0;
 
 err_release_buffers:
@@ -898,6 +914,10 @@ void UVCCameraData::imageBufferReady(FrameBuffer *buffer)
 	request->metadata().set(controls::SensorTimestamp,
 				buffer->metadata().timestamp);
 
+	/* Report the frame duration when the device's interval is known. */
+	if (timePerFrame_)
+		request->metadata().set(controls::FrameDuration, timePerFrame_->count());
+
 	pipe()->completeBuffer(request, buffer);
 	pipe()->completeRequest(request);
 }
diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp
index 25b61d049a..3836dabef3 100644
--- a/src/libcamera/v4l2_videodevice.cpp
+++ b/src/libcamera/v4l2_videodevice.cpp
@@ -1147,6 +1147,69 @@ V4L2VideoDevice::Formats V4L2VideoDevice::formats(uint32_t code)
 	return formats;
 }
 
+namespace {
+
+/* Convert a struct v4l2_fract expressed in seconds to microseconds. */
+std::chrono::microseconds
+v4l2FractionToUs(const v4l2_fract &f)
+{
+	auto seconds = std::chrono::duration<double>(f.numerator) / f.denominator;
+	return std::chrono::duration_cast<std::chrono::microseconds>(seconds);
+}
+
+} /* namespace */
+
+/**
+ * \brief Retrieve the frame interval set on the V4L2 video device
+ * \param[out] interval The frame interval applied on the device
+ *
+ * Retrieve the current time-per-frame parameter from the device.
+ *
+ * \return 0 on success or a negative error code otherwise
+ * \retval -EINVAL The buffer type has no frame interval, or the driver
+ * reported an invalid one
+ * \retval -ENOTSUP The driver doesn't support V4L2_CAP_TIMEPERFRAME
+ */
+int V4L2VideoDevice::getFrameInterval(std::chrono::microseconds *interval)
+{
+	const v4l2_fract *frameInterval = nullptr;
+	v4l2_streamparm sparm = {};
+	uint32_t caps = 0;
+
+	sparm.type = bufferType_;
+
+	int ret = ioctl(VIDIOC_G_PARM, &sparm);
+	if (ret)
+		return ret;
+
+	switch (sparm.type) {
+	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
+		frameInterval = &sparm.parm.capture.timeperframe;
+		caps = sparm.parm.capture.capability;
+		break;
+	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+		frameInterval = &sparm.parm.output.timeperframe;
+		caps = sparm.parm.output.capability;
+		break;
+	}
+
+	if (!frameInterval)
+		return -EINVAL;
+
+	if (!(caps & V4L2_CAP_TIMEPERFRAME))
+		return -ENOTSUP;
+
+	/* Guard against drivers reporting a zero denominator. */
+	if (!frameInterval->denominator)
+		return -EINVAL;
+
+	*interval = v4l2FractionToUs(*frameInterval);
+
+	return 0;
+}
+
 std::vector<V4L2PixelFormat> V4L2VideoDevice::enumPixelformats(uint32_t code)
 {
 	std::vector<V4L2PixelFormat> formats;