diff --git a/src/libcamera/pipeline/ipu3/ipu3.cpp b/src/libcamera/pipeline/ipu3/ipu3.cpp
index 164e187c769d..caf1051c58ab 100644
--- a/src/libcamera/pipeline/ipu3/ipu3.cpp
+++ b/src/libcamera/pipeline/ipu3/ipu3.cpp
@@ -171,17 +171,61 @@ private:
 		CIO2Device cio2_;
 		ImgUDevice *imgu_;
 
-		Stream stream_;
+		Stream outStream_;
+		Stream vfStream_;
+
+		unsigned int activeStreamsMask;
 	};
 
 	static constexpr unsigned int IPU3_BUFFER_COUNT = 4;
 
+	static constexpr unsigned int IPU3_STREAM_OUTPUT = BIT(0);
+	static constexpr unsigned int IPU3_STREAM_VF = BIT(1);
+
 	IPU3CameraData *cameraData(const Camera *camera)
 	{
 		return static_cast<IPU3CameraData *>(
 			PipelineHandler::cameraData(camera));
 	}
 
+	bool isOutput(IPU3CameraData *data, Stream *stream)
+	{
+		return &data->outStream_ == stream;
+	}
+	bool isOutputActive(IPU3CameraData *data)
+	{
+		return (data->activeStreamsMask & IPU3_STREAM_OUTPUT) ?
+			true : false;
+	}
+	void setOutputActive(IPU3CameraData *data)
+	{
+		data->activeStreamsMask |= IPU3_STREAM_OUTPUT;
+	}
+	bool isViewfinder(IPU3CameraData *data, Stream *stream)
+	{
+		return &data->vfStream_ == stream;
+	}
+	bool isViewfinderActive(IPU3CameraData *data)
+	{
+		return (data->activeStreamsMask & IPU3_STREAM_VF) ?
+			true : false;
+	}
+	void setViewfinderActive(IPU3CameraData *data)
+	{
+		data->activeStreamsMask |= IPU3_STREAM_VF;
+	}
+	bool isStreamActive(IPU3CameraData *data, Stream *stream)
+	{
+		if (isOutput(data, stream) &&
+		    isOutputActive(data))
+			return true;
+		if (isViewfinder(data, stream) &&
+		    isViewfinderActive(data))
+			return true;
+
+		return false;
+	}
+
 	int registerCameras();
 
 	ImgUDevice imgu0_;
@@ -210,7 +254,7 @@ PipelineHandlerIPU3::streamConfiguration(Camera *camera,
 {
 	std::map<Stream *, StreamConfiguration> configs;
 	IPU3CameraData *data = cameraData(camera);
-	StreamConfiguration *config = &configs[&data->stream_];
+	StreamConfiguration config = {};
 
 	/*
 	 * FIXME: Soraka: the maximum resolution reported by both sensors
@@ -220,52 +264,93 @@ PipelineHandlerIPU3::streamConfiguration(Camera *camera,
 	 *
 	 * \todo Clarify ImgU alignement requirements.
 	 */
-	config->width = 2560;
-	config->height = 1920;
-	config->pixelFormat = V4L2_PIX_FMT_NV12;
-	config->bufferCount = IPU3_BUFFER_COUNT;
+	config.width = 2560;
+	config.height = 1920;
+	config.pixelFormat = V4L2_PIX_FMT_NV12;
+	config.bufferCount = IPU3_BUFFER_COUNT;
+
+	configs[&data->outStream_] = config;
+	LOG(IPU3, Debug)
+		<< "Stream 'output' format set to " << config.width << "x"
+		<< config.height << "-0x" << std::hex << std::setfill('0')
+		<< std::setw(8) << config.pixelFormat;
 
+	configs[&data->vfStream_] = config;
 	LOG(IPU3, Debug)
-		<< "Stream format set to " << config->width << "x"
-		<< config->height << "-0x" << std::hex << std::setfill('0')
-		<< std::setw(8) << config->pixelFormat;
+		<< "Stream 'viewfinder' format set to " << config.width << "x"
+		<< config.height << "-0x" << std::hex << std::setfill('0')
+		<< std::setw(8) << config.pixelFormat;
 
 	return configs;
 }
 
 int PipelineHandlerIPU3::configureStreams(Camera *camera,
-					  std::map<Stream *, StreamConfiguration> &config)
+					  std::map<Stream *,
+						   StreamConfiguration> &config)
 {
 	IPU3CameraData *data = cameraData(camera);
-	const StreamConfiguration &cfg = config[&data->stream_];
+	StreamConfiguration CIO2Config = {};
 	CIO2Device *cio2 = &data->cio2_;
 	ImgUDevice *imgu = data->imgu_;
 	int ret;
 
-	LOG(IPU3, Info)
-		<< "Requested image format " << cfg.width << "x"
-		<< cfg.height << "-0x" << std::hex << std::setfill('0')
-		<< std::setw(8) << cfg.pixelFormat << " on camera '"
-		<< camera->name() << "'";
+	/* Remove previously configured stream masks to store the new ones. */
+	data->activeStreamsMask = 0;
+	for (auto const &streamConfig : config) {
+		Stream *stream = streamConfig.first;
+		const StreamConfiguration &cfg = streamConfig.second;
 
-	/*
-	 * Verify that the requested size respects the IPU3 alignement
-	 * requirements (the image width shall be a multiple of 8 pixels and
-	 * its height a multiple of 4 pixels) and the camera maximum sizes.
-	 *
-	 * \todo: consider the BDS scaling factor requirements:
-	 * "the downscaling factor must be an integer value multiple of 1/32"
-	 */
-	if (cfg.width % 8 || cfg.height % 4) {
-		LOG(IPU3, Error) << "Invalid stream size: bad alignment";
-		return -EINVAL;
-	}
+		/*
+		 * Verify that the requested size respects the IPU3 alignment
+		 * requirements (the image width shall be a multiple of 8
+		 * pixels and its height a multiple of 4 pixels) and the camera
+		 * maximum sizes.
+		 *
+		 * \todo: consider the BDS scaling factor requirements: "the
+		 * downscaling factor must be an integer value multiple of
+		 * 1/32"
+		 */
+		if (cfg.width % 8 || cfg.height % 4) {
+			LOG(IPU3, Error)
+				<< "Invalid stream size: bad alignment";
+			return -EINVAL;
+		}
 
-	if (cfg.width > cio2->maxSize_.width ||
-	    cfg.height > cio2->maxSize_.height) {
-		LOG(IPU3, Error)
-			<< "Invalid stream size: larger than sensor resolution";
-		return -EINVAL;
+		if (cfg.width > cio2->maxSize_.width ||
+		    cfg.height > cio2->maxSize_.height) {
+			LOG(IPU3, Error)
+				<< "Invalid stream size: larger than sensor resolution";
+			return -EINVAL;
+		}
+
+		LOG(IPU3, Info)
+			<< "Requested image format " << cfg.width << "x"
+			<< cfg.height << "-0x" << std::hex << std::setw(8)
+			<< cfg.pixelFormat << " on camera '"
+			<< camera->name() << "'";
+
+		/*
+		 * FIXME: As viewfinder should be operated even when
+		 * applications do not intend to use it, we need to keep track
+		 * of which streams have to be configured, to make meaningful
+		 * decisions at configure and request queueing time.
+		 *
+		 * Walk here all the streams to configure and collect the
+		 * active ones in a bitmask.
+		 */
+		if (isOutput(data, stream))
+			setOutputActive(data);
+		if (isViewfinder(data, stream))
+			setViewfinderActive(data);
+
+		/*
+		 * Collect the maximum width and height: IPU3 can downscale
+		 * only.
+		 */
+		if (cfg.width > CIO2Config.width)
+			CIO2Config.width = cfg.width;
+		if (cfg.height > CIO2Config.height)
+			CIO2Config.height = cfg.height;
 	}
 
 	/*
@@ -281,26 +366,62 @@ int PipelineHandlerIPU3::configureStreams(Camera *camera,
 	 * adjusted format to be propagated to the ImgU output devices.
 	 */
 	V4L2DeviceFormat cio2Format = {};
-	ret = cio2->configure(cfg, &cio2Format);
+	ret = cio2->configure(CIO2Config, &cio2Format);
 	if (ret)
 		return ret;
 
-	ret = imgu->configureInput(cfg, &cio2Format);
+	ret = imgu->configureInput(CIO2Config, &cio2Format);
 	if (ret)
 		return ret;
 
 	/* Apply the format to the ImgU output, viewfinder and stat. */
-	ret = imgu->configureOutput(&imgu->output_, cfg);
-	if (ret)
-		return ret;
+	for (auto const &streamConfig : config) {
+		Stream *stream = streamConfig.first;
+		const StreamConfiguration &cfg = streamConfig.second;
 
-	ret = imgu->configureOutput(&imgu->viewfinder_, cfg);
-	if (ret)
-		return ret;
+		if (isOutput(data, stream)) {
+			ret = imgu->configureOutput(&imgu->output_, cfg);
+			if (ret)
+				return ret;
 
-	ret = imgu->configureOutput(&imgu->stat_, cfg);
-	if (ret)
-		return ret;
+			ret = imgu->configureOutput(&imgu->stat_, cfg);
+			if (ret)
+				return ret;
+
+
+			if (isViewfinderActive(data))
+				continue;
+
+			/*
+			 * Configure viewfinder using the output stream
+			 * configuration if it is not part of the active
+			 * streams list.
+			 */
+			ret = imgu->configureOutput(&imgu->viewfinder_, cfg);
+			if (ret)
+				return ret;
+		} else if (isViewfinder(data, stream)) {
+			ret = imgu->configureOutput(&imgu->viewfinder_, cfg);
+			if (ret)
+				return ret;
+
+			if (isOutputActive(data))
+				continue;
+
+			/*
+			 * Configure output using the viewfinder stream
+			 * configuration if it is not part of the active
+			 * streams list.
+			 */
+			ret = imgu->configureOutput(&imgu->output_, cfg);
+			if (ret)
+				return ret;
+
+			ret = imgu->configureOutput(&imgu->stat_, cfg);
+			if (ret)
+				return ret;
+		}
+	}
 
 	return 0;
 }
@@ -404,7 +525,7 @@ int PipelineHandlerIPU3::queueRequest(Camera *camera, Request *request)
 {
 	IPU3CameraData *data = cameraData(camera);
 	V4L2Device *output = data->imgu_->output_.dev;
-	Stream *stream = &data->stream_;
+	Stream *stream = &data->outStream_;
 
 	/* Queue a buffer to the ImgU output for capture. */
 	Buffer *buffer = request->findBuffer(stream);
@@ -554,7 +675,10 @@ int PipelineHandlerIPU3::registerCameras()
 	for (unsigned int id = 0; id < 4 && numCameras < 2; ++id) {
 		std::unique_ptr<IPU3CameraData> data =
 			utils::make_unique<IPU3CameraData>(this);
-		std::set<Stream *> streams{ &data->stream_ };
+		std::set<Stream *> streams = {
+			&data->outStream_,
+			&data->vfStream_,
+		};
 		CIO2Device *cio2 = &data->cio2_;
 
 		ret = cio2->init(cio2MediaDev_.get(), id);
