Show a patch.

GET /api/1.1/patches/1726/?format=api
HTTP 200 OK
Allow: GET, PUT, PATCH, HEAD, OPTIONS
Content-Type: application/json
Vary: Accept

{
    "id": 1726,
    "url": "https://patchwork.libcamera.org/api/1.1/patches/1726/?format=api",
    "web_url": "https://patchwork.libcamera.org/patch/1726/",
    "project": {
        "id": 1,
        "url": "https://patchwork.libcamera.org/api/1.1/projects/1/?format=api",
        "name": "libcamera",
        "link_name": "libcamera",
        "list_id": "libcamera_core",
        "list_email": "libcamera-devel@lists.libcamera.org",
        "web_url": "",
        "scm_url": "",
        "webscm_url": ""
    },
    "msgid": "<20190801155420.24694-4-jacopo@jmondi.org>",
    "date": "2019-08-01T15:54:18",
    "name": "[libcamera-devel,3/5] include: android: Add Android headers from Cros",
    "commit_ref": null,
    "pull_url": null,
    "state": "superseded",
    "archived": false,
    "hash": "1b36a8eea9b39dd4f4ab203e21d1f431cf557b5b",
    "submitter": {
        "id": 3,
        "url": "https://patchwork.libcamera.org/api/1.1/people/3/?format=api",
        "name": "Jacopo Mondi",
        "email": "jacopo@jmondi.org"
    },
    "delegate": null,
    "mbox": "https://patchwork.libcamera.org/patch/1726/mbox/",
    "series": [
        {
            "id": 444,
            "url": "https://patchwork.libcamera.org/api/1.1/series/444/?format=api",
            "web_url": "https://patchwork.libcamera.org/project/libcamera/list/?series=444",
            "date": "2019-08-01T15:54:15",
            "name": "android: Add initial Camera HAL implementation",
            "version": 1,
            "mbox": "https://patchwork.libcamera.org/series/444/mbox/"
        }
    ],
    "comments": "https://patchwork.libcamera.org/api/patches/1726/comments/",
    "check": "pending",
    "checks": "https://patchwork.libcamera.org/api/patches/1726/checks/",
    "tags": {},
    "headers": {
        "Return-Path": "<jacopo@jmondi.org>",
        "Received": [
            "from relay4-d.mail.gandi.net (relay4-d.mail.gandi.net\n\t[217.70.183.196])\n\tby lancelot.ideasonboard.com (Postfix) with ESMTPS id 1A4BF615DF\n\tfor <libcamera-devel@lists.libcamera.org>;\n\tThu,  1 Aug 2019 17:53:21 +0200 (CEST)",
            "from uno.homenet.telecomitalia.it\n\t(host211-19-dynamic.58-82-r.retail.telecomitalia.it [82.58.19.211])\n\t(Authenticated sender: jacopo@jmondi.org)\n\tby relay4-d.mail.gandi.net (Postfix) with ESMTPSA id BFA17E0008;\n\tThu,  1 Aug 2019 15:53:05 +0000 (UTC)"
        ],
        "X-Originating-IP": "82.58.19.211",
        "From": "Jacopo Mondi <jacopo@jmondi.org>",
        "To": "libcamera-devel@lists.libcamera.org",
        "Date": "Thu,  1 Aug 2019 17:54:18 +0200",
        "Message-Id": "<20190801155420.24694-4-jacopo@jmondi.org>",
        "X-Mailer": "git-send-email 2.22.0",
        "In-Reply-To": "<20190801155420.24694-1-jacopo@jmondi.org>",
        "References": "<20190801155420.24694-1-jacopo@jmondi.org>",
        "MIME-Version": "1.0",
        "Content-Transfer-Encoding": "8bit",
        "X-Mailman-Approved-At": "Thu, 01 Aug 2019 17:54:14 +0200",
        "Subject": "[libcamera-devel] [PATCH 3/5] include: android: Add Android headers\n\tfrom Cros",
        "X-BeenThere": "libcamera-devel@lists.libcamera.org",
        "X-Mailman-Version": "2.1.23",
        "Precedence": "list",
        "List-Id": "<libcamera-devel.lists.libcamera.org>",
        "List-Unsubscribe": "<https://lists.libcamera.org/options/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=unsubscribe>",
        "List-Archive": "<https://lists.libcamera.org/pipermail/libcamera-devel/>",
        "List-Post": "<mailto:libcamera-devel@lists.libcamera.org>",
        "List-Help": "<mailto:libcamera-devel-request@lists.libcamera.org?subject=help>",
        "List-Subscribe": "<https://lists.libcamera.org/listinfo/libcamera-devel>,\n\t<mailto:libcamera-devel-request@lists.libcamera.org?subject=subscribe>",
        "X-List-Received-Date": "Thu, 01 Aug 2019 15:53:21 -0000"
    },
    "content": "Copy the Android Camera3 HAL headers from the ChromiumOS build system\nand define a new inclusion directive in the meson build system for them.\n\nThe header files have been copied from:\nhttps://chromium.googlesource.com/chromiumos/platform2\nat revision ceb477360a8012ee38e80746258f4828aad6b4c7\nand provide:\n\n1) Android CameraHAL3 HAL headers in include/android/hardware/\n2) The Android system headers in include/android/system/\n3) The Android camera metadata headers in include/android/metadata/\n\nThe original path in the Cros platform2/ repository is, respectively:\ncamera/android/header_files/include/hardware\ncamera/android/header_files/include/system\ncamera/android/libcamera_metadata/include/\n\nSigned-off-by: Jacopo Mondi <jacopo@jmondi.org>\n---\n .../libhardware/include/hardware/camera3.h    | 3093 +++++++++++++++++\n .../include/hardware/camera_common.h          |  916 +++++\n .../libhardware/include/hardware/fb.h         |  173 +\n .../libhardware/include/hardware/gralloc.h    |  384 ++\n .../libhardware/include/hardware/hardware.h   |  238 ++\n .../android/metadata/camera_metadata_hidden.h |  100 +\n .../android/metadata/system/camera_metadata.h |  580 ++++\n .../metadata/system/camera_metadata_tags.h    | 1005 ++++++\n .../metadata/system/camera_vendor_tags.h      |  158 +\n .../android/system/core/include/android/log.h |  144 +\n .../system/core/include/cutils/compiler.h     |   44 +\n .../core/include/cutils/native_handle.h       |   69 +\n .../system/core/include/system/camera.h       |  298 ++\n .../system/core/include/system/graphics.h     |  763 ++++\n .../system/core/include/system/window.h       |  954 +++++\n meson.build                                   |    5 +\n 16 files changed, 8924 insertions(+)\n create mode 100644 include/android/hardware/libhardware/include/hardware/camera3.h\n create mode 100644 include/android/hardware/libhardware/include/hardware/camera_common.h\n create mode 100644 
include/android/hardware/libhardware/include/hardware/fb.h\n create mode 100644 include/android/hardware/libhardware/include/hardware/gralloc.h\n create mode 100644 include/android/hardware/libhardware/include/hardware/hardware.h\n create mode 100644 include/android/metadata/camera_metadata_hidden.h\n create mode 100644 include/android/metadata/system/camera_metadata.h\n create mode 100644 include/android/metadata/system/camera_metadata_tags.h\n create mode 100644 include/android/metadata/system/camera_vendor_tags.h\n create mode 100644 include/android/system/core/include/android/log.h\n create mode 100644 include/android/system/core/include/cutils/compiler.h\n create mode 100644 include/android/system/core/include/cutils/native_handle.h\n create mode 100644 include/android/system/core/include/system/camera.h\n create mode 100644 include/android/system/core/include/system/graphics.h\n create mode 100644 include/android/system/core/include/system/window.h",
    "diff": "diff --git a/include/android/hardware/libhardware/include/hardware/camera3.h b/include/android/hardware/libhardware/include/hardware/camera3.h\nnew file mode 100644\nindex 000000000000..49664dc3ceb6\n--- /dev/null\n+++ b/include/android/hardware/libhardware/include/hardware/camera3.h\n@@ -0,0 +1,3093 @@\n+/*\n+ * Copyright (C) 2013 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef ANDROID_INCLUDE_CAMERA3_H\n+#define ANDROID_INCLUDE_CAMERA3_H\n+\n+#include <system/camera_metadata.h>\n+#include \"camera_common.h\"\n+\n+/**\n+ * Camera device HAL 3.3 [ CAMERA_DEVICE_API_VERSION_3_3 ]\n+ *\n+ * This is the current recommended version of the camera device HAL.\n+ *\n+ * Supports the android.hardware.Camera API, and as of v3.2, the\n+ * android.hardware.camera2 API in LIMITED or FULL modes.\n+ *\n+ * Camera devices that support this version of the HAL must return\n+ * CAMERA_DEVICE_API_VERSION_3_3 in camera_device_t.common.version and in\n+ * camera_info_t.device_version (from camera_module_t.get_camera_info).\n+ *\n+ * CAMERA_DEVICE_API_VERSION_3_3:\n+ *    Camera modules that may contain version 3.3 devices must implement at\n+ *    least version 2.2 of the camera module interface (as defined by\n+ *    camera_module_t.common.module_api_version).\n+ *\n+ * CAMERA_DEVICE_API_VERSION_3_2:\n+ *    Camera modules that may contain version 3.2 devices must implement at\n+ *    least 
version 2.2 of the camera module interface (as defined by\n+ *    camera_module_t.common.module_api_version).\n+ *\n+ * <= CAMERA_DEVICE_API_VERSION_3_1:\n+ *    Camera modules that may contain version 3.1 (or 3.0) devices must\n+ *    implement at least version 2.0 of the camera module interface\n+ *    (as defined by camera_module_t.common.module_api_version).\n+ *\n+ * See camera_common.h for more versioning details.\n+ *\n+ * Documentation index:\n+ *   S1. Version history\n+ *   S2. Startup and operation sequencing\n+ *   S3. Operational modes\n+ *   S4. 3A modes and state machines\n+ *   S5. Cropping\n+ *   S6. Error management\n+ *   S7. Key Performance Indicator (KPI) glossary\n+ *   S8. Sample Use Cases\n+ *   S9. Notes on Controls and Metadata\n+ *   S10. Reprocessing flow and controls\n+ */\n+\n+/**\n+ * S1. Version history:\n+ *\n+ * 1.0: Initial Android camera HAL (Android 4.0) [camera.h]:\n+ *\n+ *   - Converted from C++ CameraHardwareInterface abstraction layer.\n+ *\n+ *   - Supports android.hardware.Camera API.\n+ *\n+ * 2.0: Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:\n+ *\n+ *   - Sufficient for implementing existing android.hardware.Camera API.\n+ *\n+ *   - Allows for ZSL queue in camera service layer\n+ *\n+ *   - Not tested for any new features such manual capture control, Bayer RAW\n+ *     capture, reprocessing of RAW data.\n+ *\n+ * 3.0: First revision of expanded-capability HAL:\n+ *\n+ *   - Major version change since the ABI is completely different. No change to\n+ *     the required hardware capabilities or operational model from 2.0.\n+ *\n+ *   - Reworked input request and stream queue interfaces: Framework calls into\n+ *     HAL with next request and stream buffers already dequeued. 
Sync framework\n+ *     support is included, necessary for efficient implementations.\n+ *\n+ *   - Moved triggers into requests, most notifications into results.\n+ *\n+ *   - Consolidated all callbacks into framework into one structure, and all\n+ *     setup methods into a single initialize() call.\n+ *\n+ *   - Made stream configuration into a single call to simplify stream\n+ *     management. Bidirectional streams replace STREAM_FROM_STREAM construct.\n+ *\n+ *   - Limited mode semantics for older/limited hardware devices.\n+ *\n+ * 3.1: Minor revision of expanded-capability HAL:\n+ *\n+ *   - configure_streams passes consumer usage flags to the HAL.\n+ *\n+ *   - flush call to drop all in-flight requests/buffers as fast as possible.\n+ *\n+ * 3.2: Minor revision of expanded-capability HAL:\n+ *\n+ *   - Deprecates get_metadata_vendor_tag_ops.  Please use get_vendor_tag_ops\n+ *     in camera_common.h instead.\n+ *\n+ *   - register_stream_buffers deprecated. All gralloc buffers provided\n+ *     by framework to HAL in process_capture_request may be new at any time.\n+ *\n+ *   - add partial result support. process_capture_result may be called\n+ *     multiple times with a subset of the available result before the full\n+ *     result is available.\n+ *\n+ *   - add manual template to camera3_request_template. The applications may\n+ *     use this template to control the capture settings directly.\n+ *\n+ *   - Rework the bidirectional and input stream specifications.\n+ *\n+ *   - change the input buffer return path. 
The buffer is returned in\n+ *     process_capture_result instead of process_capture_request.\n+ *\n+ * 3.3: Minor revision of expanded-capability HAL:\n+ *\n+ *   - OPAQUE and YUV reprocessing API updates.\n+ *\n+ *   - Basic support for depth output buffers.\n+ *\n+ *   - Addition of data_space field to camera3_stream_t.\n+ *\n+ *   - Addition of rotation field to camera3_stream_t.\n+ *\n+ *   - Addition of camera3 stream configuration operation mode to camera3_stream_configuration_t\n+ *\n+ */\n+\n+/**\n+ * S2. Startup and general expected operation sequence:\n+ *\n+ * 1. Framework calls camera_module_t->common.open(), which returns a\n+ *    hardware_device_t structure.\n+ *\n+ * 2. Framework inspects the hardware_device_t->version field, and instantiates\n+ *    the appropriate handler for that version of the camera hardware device. In\n+ *    case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to\n+ *    a camera3_device_t.\n+ *\n+ * 3. Framework calls camera3_device_t->ops->initialize() with the framework\n+ *    callback function pointers. This will only be called this one time after\n+ *    open(), before any other functions in the ops structure are called.\n+ *\n+ * 4. The framework calls camera3_device_t->ops->configure_streams() with a list\n+ *    of input/output streams to the HAL device.\n+ *\n+ * 5. <= CAMERA_DEVICE_API_VERSION_3_1:\n+ *\n+ *    The framework allocates gralloc buffers and calls\n+ *    camera3_device_t->ops->register_stream_buffers() for at least one of the\n+ *    output streams listed in configure_streams. The same stream is registered\n+ *    only once.\n+ *\n+ *    >= CAMERA_DEVICE_API_VERSION_3_2:\n+ *\n+ *    camera3_device_t->ops->register_stream_buffers() is not called and must\n+ *    be NULL.\n+ *\n+ * 6. The framework requests default settings for some number of use cases with\n+ *    calls to camera3_device_t->ops->construct_default_request_settings(). 
This\n+ *    may occur any time after step 3.\n+ *\n+ * 7. The framework constructs and sends the first capture request to the HAL,\n+ *    with settings based on one of the sets of default settings, and with at\n+ *    least one output stream, which has been registered earlier by the\n+ *    framework. This is sent to the HAL with\n+ *    camera3_device_t->ops->process_capture_request(). The HAL must block the\n+ *    return of this call until it is ready for the next request to be sent.\n+ *\n+ *    >= CAMERA_DEVICE_API_VERSION_3_2:\n+ *\n+ *    The buffer_handle_t provided in the camera3_stream_buffer_t array\n+ *    in the camera3_capture_request_t may be new and never-before-seen\n+ *    by the HAL on any given new request.\n+ *\n+ * 8. The framework continues to submit requests, and call\n+ *    construct_default_request_settings to get default settings buffers for\n+ *    other use cases.\n+ *\n+ *    <= CAMERA_DEVICE_API_VERSION_3_1:\n+ *\n+ *    The framework may call register_stream_buffers() at this time for\n+ *    not-yet-registered streams.\n+ *\n+ * 9. When the capture of a request begins (sensor starts exposing for the\n+ *    capture) or processing a reprocess request begins, the HAL\n+ *    calls camera3_callback_ops_t->notify() with the SHUTTER event, including\n+ *    the frame number and the timestamp for start of exposure. 
For a reprocess\n+ *    request, the timestamp must be the start of exposure of the input image\n+ *    which can be looked up with android.sensor.timestamp from\n+ *    camera3_capture_request_t.settings when process_capture_request() is\n+ *    called.\n+ *\n+ *    <= CAMERA_DEVICE_API_VERSION_3_1:\n+ *\n+ *    This notify call must be made before the first call to\n+ *    process_capture_result() for that frame number.\n+ *\n+ *    >= CAMERA_DEVICE_API_VERSION_3_2:\n+ *\n+ *    The camera3_callback_ops_t->notify() call with the SHUTTER event should\n+ *    be made as early as possible since the framework will be unable to\n+ *    deliver gralloc buffers to the application layer (for that frame) until\n+ *    it has a valid timestamp for the start of exposure (or the input image's\n+ *    start of exposure for a reprocess request).\n+ *\n+ *    Both partial metadata results and the gralloc buffers may be sent to the\n+ *    framework at any time before or after the SHUTTER event.\n+ *\n+ * 10. After some pipeline delay, the HAL begins to return completed captures to\n+ *    the framework with camera3_callback_ops_t->process_capture_result(). These\n+ *    are returned in the same order as the requests were submitted. Multiple\n+ *    requests can be in flight at once, depending on the pipeline depth of the\n+ *    camera HAL device.\n+ *\n+ *    >= CAMERA_DEVICE_API_VERSION_3_2:\n+ *\n+ *    Once a buffer is returned by process_capture_result as part of the\n+ *    camera3_stream_buffer_t array, and the fence specified by release_fence\n+ *    has been signaled (this is a no-op for -1 fences), the ownership of that\n+ *    buffer is considered to be transferred back to the framework. 
After that,\n+ *    the HAL must no longer retain that particular buffer, and the\n+ *    framework may clean up the memory for it immediately.\n+ *\n+ *    process_capture_result may be called multiple times for a single frame,\n+ *    each time with a new disjoint piece of metadata and/or set of gralloc\n+ *    buffers. The framework will accumulate these partial metadata results\n+ *    into one result.\n+ *\n+ *    In particular, it is legal for a process_capture_result to be called\n+ *    simultaneously for both a frame N and a frame N+1 as long as the\n+ *    above rule holds for gralloc buffers (both input and output).\n+ *\n+ * 11. After some time, the framework may stop submitting new requests, wait for\n+ *    the existing captures to complete (all buffers filled, all results\n+ *    returned), and then call configure_streams() again. This resets the camera\n+ *    hardware and pipeline for a new set of input/output streams. Some streams\n+ *    may be reused from the previous configuration; if these streams' buffers\n+ *    had already been registered with the HAL, they will not be registered\n+ *    again. The framework then continues from step 7, if at least one\n+ *    registered output stream remains (otherwise, step 5 is required first).\n+ *\n+ * 12. Alternatively, the framework may call camera3_device_t->common->close()\n+ *    to end the camera session. This may be called at any time when no other\n+ *    calls from the framework are active, although the call may block until all\n+ *    in-flight captures have completed (all results returned, all buffers\n+ *    filled). After the close call returns, no more calls to the\n+ *    camera3_callback_ops_t functions are allowed from the HAL. Once the\n+ *    close() call is underway, the framework may not call any other HAL device\n+ *    functions.\n+ *\n+ * 13. 
In case of an error or other asynchronous event, the HAL must call\n+ *    camera3_callback_ops_t->notify() with the appropriate error/event\n+ *    message. After returning from a fatal device-wide error notification, the\n+ *    HAL should act as if close() had been called on it. However, the HAL must\n+ *    either cancel or complete all outstanding captures before calling\n+ *    notify(), so that once notify() is called with a fatal error, the\n+ *    framework will not receive further callbacks from the device. Methods\n+ *    besides close() should return -ENODEV or NULL after the notify() method\n+ *    returns from a fatal error message.\n+ */\n+\n+/**\n+ * S3. Operational modes:\n+ *\n+ * The camera 3 HAL device can implement one of two possible operational modes;\n+ * limited and full. Full support is expected from new higher-end\n+ * devices. Limited mode has hardware requirements roughly in line with those\n+ * for a camera HAL device v1 implementation, and is expected from older or\n+ * inexpensive devices. Full is a strict superset of limited, and they share the\n+ * same essential operational flow, as documented above.\n+ *\n+ * The HAL must indicate its level of support with the\n+ * android.info.supportedHardwareLevel static metadata entry, with 0 indicating\n+ * limited mode, and 1 indicating full mode support.\n+ *\n+ * Roughly speaking, limited-mode devices do not allow for application control\n+ * of capture settings (3A control only), high-rate capture of high-resolution\n+ * images, raw sensor readout, or support for YUV output streams above maximum\n+ * recording resolution (JPEG only for large images).\n+ *\n+ * ** Details of limited mode behavior:\n+ *\n+ * - Limited-mode devices do not need to implement accurate synchronization\n+ *   between capture request settings and the actual image data\n+ *   captured. 
Instead, changes to settings may take effect some time in the\n+ *   future, and possibly not for the same output frame for each settings\n+ *   entry. Rapid changes in settings may result in some settings never being\n+ *   used for a capture. However, captures that include high-resolution output\n+ *   buffers ( > 1080p ) have to use the settings as specified (but see below\n+ *   for processing rate).\n+ *\n+ * - Limited-mode devices do not need to support most of the\n+ *   settings/result/static info metadata. Specifically, only the following settings\n+ *   are expected to be consumed or produced by a limited-mode HAL device:\n+ *\n+ *   android.control.aeAntibandingMode (controls and dynamic)\n+ *   android.control.aeExposureCompensation (controls and dynamic)\n+ *   android.control.aeLock (controls and dynamic)\n+ *   android.control.aeMode (controls and dynamic)\n+ *   android.control.aeRegions (controls and dynamic)\n+ *   android.control.aeTargetFpsRange (controls and dynamic)\n+ *   android.control.aePrecaptureTrigger (controls and dynamic)\n+ *   android.control.afMode (controls and dynamic)\n+ *   android.control.afRegions (controls and dynamic)\n+ *   android.control.awbLock (controls and dynamic)\n+ *   android.control.awbMode (controls and dynamic)\n+ *   android.control.awbRegions (controls and dynamic)\n+ *   android.control.captureIntent (controls and dynamic)\n+ *   android.control.effectMode (controls and dynamic)\n+ *   android.control.mode (controls and dynamic)\n+ *   android.control.sceneMode (controls and dynamic)\n+ *   android.control.videoStabilizationMode (controls and dynamic)\n+ *   android.control.aeAvailableAntibandingModes (static)\n+ *   android.control.aeAvailableModes (static)\n+ *   android.control.aeAvailableTargetFpsRanges (static)\n+ *   android.control.aeCompensationRange (static)\n+ *   android.control.aeCompensationStep (static)\n+ *   android.control.afAvailableModes (static)\n+ *   android.control.availableEffects 
(static)\n+ *   android.control.availableSceneModes (static)\n+ *   android.control.availableVideoStabilizationModes (static)\n+ *   android.control.awbAvailableModes (static)\n+ *   android.control.maxRegions (static)\n+ *   android.control.sceneModeOverrides (static)\n+ *   android.control.aeState (dynamic)\n+ *   android.control.afState (dynamic)\n+ *   android.control.awbState (dynamic)\n+ *\n+ *   android.flash.mode (controls and dynamic)\n+ *   android.flash.info.available (static)\n+ *\n+ *   android.info.supportedHardwareLevel (static)\n+ *\n+ *   android.jpeg.gpsCoordinates (controls and dynamic)\n+ *   android.jpeg.gpsProcessingMethod (controls and dynamic)\n+ *   android.jpeg.gpsTimestamp (controls and dynamic)\n+ *   android.jpeg.orientation (controls and dynamic)\n+ *   android.jpeg.quality (controls and dynamic)\n+ *   android.jpeg.thumbnailQuality (controls and dynamic)\n+ *   android.jpeg.thumbnailSize (controls and dynamic)\n+ *   android.jpeg.availableThumbnailSizes (static)\n+ *   android.jpeg.maxSize (static)\n+ *\n+ *   android.lens.info.minimumFocusDistance (static)\n+ *\n+ *   android.request.id (controls and dynamic)\n+ *\n+ *   android.scaler.cropRegion (controls and dynamic)\n+ *   android.scaler.availableStreamConfigurations (static)\n+ *   android.scaler.availableMinFrameDurations (static)\n+ *   android.scaler.availableStallDurations (static)\n+ *   android.scaler.availableMaxDigitalZoom (static)\n+ *   android.scaler.maxDigitalZoom (static)\n+ *   android.scaler.croppingType (static)\n+ *\n+ *   android.sensor.orientation (static)\n+ *   android.sensor.timestamp (dynamic)\n+ *\n+ *   android.statistics.faceDetectMode (controls and dynamic)\n+ *   android.statistics.info.availableFaceDetectModes (static)\n+ *   android.statistics.faceIds (dynamic)\n+ *   android.statistics.faceLandmarks (dynamic)\n+ *   android.statistics.faceRectangles (dynamic)\n+ *   android.statistics.faceScores (dynamic)\n+ *\n+ *   android.sync.frameNumber 
(dynamic)\n+ *   android.sync.maxLatency (static)\n+ *\n+ * - Captures in limited mode that include high-resolution (> 1080p) output\n+ *   buffers may block in process_capture_request() until all the output buffers\n+ *   have been filled. A full-mode HAL device must process sequences of\n+ *   high-resolution requests at the rate indicated in the static metadata for\n+ *   that pixel format. The HAL must still call process_capture_result() to\n+ *   provide the output; the framework must simply be prepared for\n+ *   process_capture_request() to block until after process_capture_result() for\n+ *   that request completes for high-resolution captures for limited-mode\n+ *   devices.\n+ *\n+ * - Full-mode devices must support below additional capabilities:\n+ *   - 30fps at maximum resolution is preferred, more than 20fps is required.\n+ *   - Per frame control (android.sync.maxLatency == PER_FRAME_CONTROL).\n+ *   - Sensor manual control metadata. See MANUAL_SENSOR defined in\n+ *     android.request.availableCapabilities.\n+ *   - Post-processing manual control metadata. See MANUAL_POST_PROCESSING defined\n+ *     in android.request.availableCapabilities.\n+ *\n+ */\n+\n+/**\n+ * S4. 3A modes and state machines:\n+ *\n+ * While the actual 3A algorithms are up to the HAL implementation, a high-level\n+ * state machine description is defined by the HAL interface, to allow the HAL\n+ * device and the framework to communicate about the current state of 3A, and to\n+ * trigger 3A events.\n+ *\n+ * When the device is opened, all the individual 3A states must be\n+ * STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked\n+ * focus must be maintained across the configure() call.\n+ *\n+ * Triggering a 3A action involves simply setting the relevant trigger entry in\n+ * the settings for the next request to indicate start of trigger. 
For example,\n+ * the trigger for starting an autofocus scan is setting the entry\n+ * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one\n+ * request, and cancelling an autofocus scan is triggered by setting\n+ * ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTRL_AF_TRIGGER_CANCEL. Otherwise,\n+ * the entry will not exist, or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each\n+ * request with a trigger entry set to a non-IDLE value will be treated as an\n+ * independent triggering event.\n+ *\n+ * At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting, which\n+ * selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode\n+ * (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting\n+ * (ANDROID_CONTROL_USE_SCENE_MODE).\n+ *\n+ * - In OFF mode, each of the individual AE/AF/AWB modes are effectively OFF,\n+ *   and none of the capture controls may be overridden by the 3A routines.\n+ *\n+ * - In AUTO mode, Auto-focus, auto-exposure, and auto-whitebalance all run\n+ *   their own independent algorithms, and have their own mode, state, and\n+ *   trigger metadata entries, as listed in the next section.\n+ *\n+ * - In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must\n+ *   be used to determine the behavior of 3A routines. In SCENE_MODEs other than\n+ *   FACE_PRIORITY, the HAL must override the values of\n+ *   ANDROId_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected\n+ *   SCENE_MODE. For example, the HAL may prefer SCENE_MODE_NIGHT to use\n+ *   CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE when scene\n+ *   must be ignored for these scene modes.\n+ *\n+ * - For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in\n+ *   ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering\n+ *   and focusing on any detected faces in the scene.\n+ *\n+ * S4.1. 
Auto-focus settings and result entries:\n+ *\n+ *  Main metadata entries:\n+ *\n+ *   ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus\n+ *      mode. Set by the framework in the request settings.\n+ *\n+ *     AF_MODE_OFF: AF is disabled; the framework/app directly controls lens\n+ *         position.\n+ *\n+ *     AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is\n+ *         triggered.\n+ *\n+ *     AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless\n+ *         AF is triggered.\n+ *\n+ *     AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording\n+ *         video. Triggering immediately locks focus in current\n+ *         position. Canceling resumes cotinuous focusing.\n+ *\n+ *     AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for\n+ *        zero-shutter-lag still capture. Triggering locks focus once currently\n+ *        active sweep concludes. Canceling resumes continuous focusing.\n+ *\n+ *     AF_MODE_EDOF: Advanced extended depth of field focusing. There is no\n+ *        autofocus scan, so triggering one or canceling one has no effect.\n+ *        Images are focused automatically by the HAL.\n+ *\n+ *   ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF\n+ *       algorithm state, reported by the HAL in the result metadata.\n+ *\n+ *     AF_STATE_INACTIVE: No focusing has been done, or algorithm was\n+ *        reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF.\n+ *        When the device is opened, it must start in this state.\n+ *\n+ *     AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning\n+ *        for good focus. The lens is moving.\n+ *\n+ *     AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is\n+ *        well focused. The lens is not moving. 
The HAL may spontaneously leave\n+ *        this state.\n+ *\n+ *     AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is\n+ *        not well focused. The lens is not moving. The HAL may spontaneously\n+ *        leave this state.\n+ *\n+ *     AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.\n+ *\n+ *     AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The\n+ *        lens is not moving.\n+ *\n+ *     AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to\n+ *        focus. The lens is not moving.\n+ *\n+ *   ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the\n+ *       meaning of which is mode- and state- dependent. Set by the framework in\n+ *       the request settings.\n+ *\n+ *     AF_TRIGGER_IDLE: No current trigger.\n+ *\n+ *     AF_TRIGGER_START: Trigger start of AF scan. Effect is mode and state\n+ *         dependent.\n+ *\n+ *     AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to\n+ *         default.\n+ *\n+ *  Additional metadata entries:\n+ *\n+ *   ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the FOV\n+ *       that should be used to determine good focus. This applies to all AF\n+ *       modes that scan for focus. Set by the framework in the request\n+ *       settings.\n+ *\n+ * S4.2. Auto-exposure settings and result entries:\n+ *\n+ *  Main metadata entries:\n+ *\n+ *   ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure\n+ *       mode. Set by the framework in the request settings.\n+ *\n+ *     AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain,\n+ *         frame duration, and flash.\n+ *\n+ *     AE_MODE_ON: Standard autoexposure, with flash control disabled. User may\n+ *         set flash to fire or to torch mode.\n+ *\n+ *     AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's\n+ *         discretion for precapture and still capture. 
User control of flash\n+ *         disabled.\n+ *\n+ *     AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired\n+ *         for capture, and at HAL's discretion for precapture. User control of\n+ *         flash disabled.\n+ *\n+ *     AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at\n+ *         HAL's discretion for precapture and still capture. Use a flash burst\n+ *         at end of precapture sequence to reduce redeye in the final\n+ *         picture. User control of flash disabled.\n+ *\n+ *   ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE\n+ *       algorithm state, reported by the HAL in the result metadata.\n+ *\n+ *     AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is\n+ *         opened, it must start in this state.\n+ *\n+ *     AE_STATE_SEARCHING: AE is not converged to a good value, and is adjusting\n+ *         exposure parameters.\n+ *\n+ *     AE_STATE_CONVERGED: AE has found good exposure values for the current\n+ *         scene, and the exposure parameters are not changing. HAL may\n+ *         spontaneously leave this state to search for better solution.\n+ *\n+ *     AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure\n+ *         values are not changing.\n+ *\n+ *     AE_STATE_FLASH_REQUIRED: The HAL has converged exposure, but believes\n+ *         flash is required for a sufficiently bright picture. Used for\n+ *         determining if a zero-shutter-lag frame can be used.\n+ *\n+ *     AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture\n+ *         sequence. Depending on AE mode, this mode may involve firing the\n+ *         flash for metering, or a burst of flash pulses for redeye reduction.\n+ *\n+ *   ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering\n+ *       sequence before capturing a high-quality image. 
Set by the framework in\n+ *       the request settings.\n+ *\n+ *      PRECAPTURE_TRIGGER_IDLE: No current trigger.\n+ *\n+ *      PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should\n+ *         use the subsequent requests to measure good exposure/white balance\n+ *         for an upcoming high-resolution capture.\n+ *\n+ *  Additional metadata entries:\n+ *\n+ *   ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current\n+ *       values\n+ *\n+ *   ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE\n+ *       algorithm target brightness point.\n+ *\n+ *   ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame\n+ *       rate range for the AE algorithm. The AE routine cannot change the frame\n+ *       rate to be outside these bounds.\n+ *\n+ *   ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV\n+ *       that should be used to determine good exposure levels. This applies to\n+ *       all AE modes besides OFF.\n+ *\n+ * S4.3. Auto-whitebalance settings and result entries:\n+ *\n+ *  Main metadata entries:\n+ *\n+ *   ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance\n+ *       mode.\n+ *\n+ *     AWB_MODE_OFF: Auto-whitebalance is disabled. 
User controls color matrix.\n+ *\n+ *     AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color\n+ *        transform, possibly using more complex transforms than a simple\n+ *        matrix.\n+ *\n+ *     AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor\n+ *        incandescent (tungsten) lighting, roughly 2700K.\n+ *\n+ *     AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent\n+ *        lighting, roughly 5000K.\n+ *\n+ *     AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for\n+ *        fluorescent lighting, roughly 3000K.\n+ *\n+ *     AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight,\n+ *        roughly 5500K.\n+ *\n+ *     AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded\n+ *        daylight, roughly 6500K.\n+ *\n+ *     AWB_MODE_TWILIGHT: Fixed white balance settings good for\n+ *        near-sunset/sunrise, roughly 15000K.\n+ *\n+ *     AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly\n+ *        lit by the sun, roughly 7500K.\n+ *\n+ *   ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB\n+ *       algorithm state, reported by the HAL in the result metadata.\n+ *\n+ *     AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device\n+ *         is opened, it must start in this state.\n+ *\n+ *     AWB_STATE_SEARCHING: AWB is not converged to a good value, and is\n+ *         changing color adjustment parameters.\n+ *\n+ *     AWB_STATE_CONVERGED: AWB has found good color adjustment values for the\n+ *         current scene, and the parameters are not changing. HAL may\n+ *         spontaneously leave this state to search for better solution.\n+ *\n+ *     AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. 
Color\n+ *         adjustment values are not changing.\n+ *\n+ *  Additional metadata entries:\n+ *\n+ *   ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to\n+ *       their current values.\n+ *\n+ *   ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV\n+ *       that should be used to determine good color balance. This applies only\n+ *       to auto-WB mode.\n+ *\n+ * S4.4. General state machine transition notes\n+ *\n+ *   Switching between AF, AE, or AWB modes always resets the algorithm's state\n+ *   to INACTIVE.  Similarly, switching between CONTROL_MODE or\n+ *   CONTROL_SCENE_MODE if CONTROL_MODE == USE_SCENE_MODE resets all the\n+ *   algorithm states to INACTIVE.\n+ *\n+ *   The tables below are per-mode.\n+ *\n+ * S4.5. AF state machines\n+ *\n+ *                       when enabling AF or changing AF mode\n+ *| state              | trans. cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| Any                | AF mode change| INACTIVE           |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ *                            mode = AF_MODE_OFF or AF_MODE_EDOF\n+ *| state              | trans. cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           |               | INACTIVE           | Never changes    |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ *                            mode = AF_MODE_AUTO or AF_MODE_MACRO\n+ *| state              | trans. 
cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | AF_TRIGGER    | ACTIVE_SCAN        | Start AF sweep   |\n+ *|                    |               |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| ACTIVE_SCAN        | AF sweep done | FOCUSED_LOCKED     | If AF successful |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| ACTIVE_SCAN        | AF sweep done | NOT_FOCUSED_LOCKED | If AF successful |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| ACTIVE_SCAN        | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FOCUSED_LOCKED     | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |\n+ *|                    |               |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Cancel/reset AF  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | ACTIVE_SCAN        | Start new sweep  |\n+ *|                    |               |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| All states         | mode change   
| INACTIVE           |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ *                            mode = AF_MODE_CONTINUOUS_VIDEO\n+ *| state              | trans. cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |\n+ *|                    | new scan      |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |\n+ *|                    | current scan  |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |\n+ *|                    | current scan  |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |\n+ *|                    |               |                    | if focus is good |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. 
|\n+ *|                    |               |                    | if focus is bad  |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |\n+ *|                    |               |                    | position         |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |\n+ *|                    | new scan      |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |\n+ *|                    | new scan      |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. 
|\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ *                            mode = AF_MODE_CONTINUOUS_PICTURE\n+ *| state              | trans. cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | HAL initiates | PASSIVE_SCAN       | Start AF scan    |\n+ *|                    | new scan      |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | AF_TRIGGER    | NOT_FOCUSED_LOCKED | AF state query   |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | HAL completes | PASSIVE_FOCUSED    | End AF scan      |\n+ *|                    | current scan  |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | HAL fails     | PASSIVE_UNFOCUSED  | End AF scan      |\n+ *|                    | current scan  |                    | Lens now locked  |\n+ 
*+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | AF_TRIGGER    | FOCUSED_LOCKED     | Eventual trans.  |\n+ *|                    |               |                    | once focus good  |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Eventual trans.  |\n+ *|                    |               |                    | if cannot focus  |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_SCAN       | AF_CANCEL     | INACTIVE           | Reset lens       |\n+ *|                    |               |                    | position         |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_FOCUSED    | HAL initiates | PASSIVE_SCAN       | Start AF scan    |\n+ *|                    | new scan      |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_UNFOCUSED  | HAL initiates | PASSIVE_SCAN       | Start AF scan    |\n+ *|                    | new scan      |                    | Lens now moving  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_FOCUSED    | AF_TRIGGER    | FOCUSED_LOCKED     | Immediate trans. |\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PASSIVE_UNFOCUSED  | AF_TRIGGER    | NOT_FOCUSED_LOCKED | Immediate trans. 
|\n+ *|                    |               |                    | Lens now locked  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FOCUSED_LOCKED     | AF_TRIGGER    | FOCUSED_LOCKED     | No effect        |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FOCUSED_LOCKED     | AF_CANCEL     | INACTIVE           | Restart AF scan  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| NOT_FOCUSED_LOCKED | AF_TRIGGER    | NOT_FOCUSED_LOCKED | No effect        |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| NOT_FOCUSED_LOCKED | AF_CANCEL     | INACTIVE           | Restart AF scan  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ * S4.6. AE and AWB state machines\n+ *\n+ *   The AE and AWB state machines are mostly identical. AE has additional\n+ *   FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two\n+ *   states should be ignored for the AWB state machine.\n+ *\n+ *                  when enabling AE/AWB or changing AE/AWB mode\n+ *| state              | trans. cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| Any                |  mode change  | INACTIVE           |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ *                            mode = AE_MODE_OFF / AWB mode not AUTO\n+ *| state              | trans. 
cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           |               | INACTIVE           | AE/AWB disabled  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ *                            mode = AE_MODE_ON_* / AWB_MODE_AUTO\n+ *| state              | trans. cause  | new state          | notes            |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | HAL initiates | SEARCHING          |                  |\n+ *|                    | AE/AWB scan   |                    |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| INACTIVE           | AE/AWB_LOCK   | LOCKED             | values locked    |\n+ *|                    | on            |                    |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| SEARCHING          | HAL finishes  | CONVERGED          | good values, not |\n+ *|                    | AE/AWB scan   |                    | changing         |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| SEARCHING          | HAL finishes  | FLASH_REQUIRED     | converged but too|\n+ *|                    | AE scan       |                    | dark w/o flash   |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| SEARCHING          | AE/AWB_LOCK   | LOCKED             | values locked    |\n+ *|                    | on            |                    |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| CONVERGED          | HAL initiates | SEARCHING          | values locked    |\n+ *|                    | AE/AWB scan   |                    |                  |\n+ 
*+--------------------+---------------+--------------------+------------------+\n+ *| CONVERGED          | AE/AWB_LOCK   | LOCKED             | values locked    |\n+ *|                    | on            |                    |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FLASH_REQUIRED     | HAL initiates | SEARCHING          | values locked    |\n+ *|                    | AE/AWB scan   |                    |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| FLASH_REQUIRED     | AE/AWB_LOCK   | LOCKED             | values locked    |\n+ *|                    | on            |                    |                  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| LOCKED             | AE/AWB_LOCK   | SEARCHING          | values not good  |\n+ *|                    | off           |                    | after unlock     |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| LOCKED             | AE/AWB_LOCK   | CONVERGED          | values good      |\n+ *|                    | off           |                    | after unlock     |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| LOCKED             | AE_LOCK       | FLASH_REQUIRED     | exposure good,   |\n+ *|                    | off           |                    | but too dark     |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| All AE states      | PRECAPTURE_   | PRECAPTURE         | Start precapture |\n+ *|                    | START         |                    | sequence         |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *| PRECAPTURE         | Sequence done.| CONVERGED          | Ready for high-  |\n+ *|                    | AE_LOCK off   |                    | quality capture  |\n+ 
*+--------------------+---------------+--------------------+------------------+\n+ *| PRECAPTURE         | Sequence done.| LOCKED             | Ready for high-  |\n+ *|                    | AE_LOCK on    |                    | quality capture  |\n+ *+--------------------+---------------+--------------------+------------------+\n+ *\n+ */\n+\n+/**\n+ * S5. Cropping:\n+ *\n+ * Cropping of the full pixel array (for digital zoom and other use cases where\n+ * a smaller FOV is desirable) is communicated through the\n+ * ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can\n+ * change on a per-request basis, which is critical for implementing smooth\n+ * digital zoom.\n+ *\n+ * The region is defined as a rectangle (x, y, width, height), with (x, y)\n+ * describing the top-left corner of the rectangle. The rectangle is defined on\n+ * the coordinate system of the sensor active pixel array, with (0,0) being the\n+ * top-left pixel of the active pixel array. Therefore, the width and height\n+ * cannot be larger than the dimensions reported in the\n+ * ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed\n+ * width and height are reported by the HAL through the\n+ * ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the\n+ * maximum supported zoom factor. Therefore, the minimum crop region width and\n+ * height are:\n+ *\n+ * {width, height} =\n+ *    { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /\n+ *        ANDROID_SCALER_MAX_DIGITAL_ZOOM),\n+ *      floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /\n+ *        ANDROID_SCALER_MAX_DIGITAL_ZOOM) }\n+ *\n+ * If the crop region needs to fulfill specific requirements (for example, it\n+ * needs to start on even coordinates, and its width/height needs to be even),\n+ * the HAL must do the necessary rounding and write out the final crop region\n+ * used in the output result metadata. 
Similarly, if the HAL implements video\n+ * stabilization, it must adjust the result crop region to describe the region\n+ * actually included in the output after video stabilization is applied. In\n+ * general, a camera-using application must be able to determine the field of\n+ * view it is receiving based on the crop region, the dimensions of the image\n+ * sensor, and the lens focal length.\n+ *\n+ * It is assumed that the cropping is applied after raw to other color space\n+ * conversion. Raw streams (RAW16 and RAW_OPAQUE) don't have this conversion stage,\n+ * and are not croppable. Therefore, the crop region must be ignored by the HAL\n+ * for raw streams.\n+ *\n+ * Since the crop region applies to all non-raw streams, which may have different aspect\n+ * ratios than the crop region, the exact sensor region used for each stream may\n+ * be smaller than the crop region. Specifically, each stream should maintain\n+ * square pixels and its aspect ratio by minimally further cropping the defined\n+ * crop region. 
If the stream's aspect ratio is wider than the crop region, the\n+ * stream should be further cropped vertically, and if the stream's aspect ratio\n+ * is narrower than the crop region, the stream should be further cropped\n+ * horizontally.\n+ *\n+ * In all cases, the stream crop must be centered within the full crop region,\n+ * and each stream is only either cropped horizontally or vertical relative to\n+ * the full crop region, never both.\n+ *\n+ * For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a\n+ * 1280x720 stream (16:9 aspect), below demonstrates the expected output regions\n+ * for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x\n+ * 1500 pixel array) sensor.\n+ *\n+ * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)\n+ *\n+ *   640x480 stream crop: (500, 375, 1000, 750) (equal to crop region)\n+ *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)\n+ *\n+ * 0                   1000               2000\n+ * +---------+---------+---------+----------+\n+ * | Active pixel array                     |\n+ * |                                        |\n+ * |                                        |\n+ * +         +-------------------+          + 375\n+ * |         |                   |          |\n+ * |         O===================O          |\n+ * |         I 1280x720 stream   I          |\n+ * +         I                   I          + 750\n+ * |         I                   I          |\n+ * |         O===================O          |\n+ * |         |                   |          |\n+ * +         +-------------------+          + 1125\n+ * |          Crop region, 640x480 stream   |\n+ * |                                        |\n+ * |                                        |\n+ * +---------+---------+---------+----------+ 1500\n+ *\n+ * Crop region: (500, 375, 1333, 750) (16:9 aspect ratio)\n+ *\n+ *   640x480 stream crop: (666, 375, 1000, 750) (marked with =)\n+ *   1280x720 stream crop: 
(500, 375, 1333, 750) (equal to crop region)\n+ *\n+ * 0                   1000               2000\n+ * +---------+---------+---------+----------+\n+ * | Active pixel array                     |\n+ * |                                        |\n+ * |                                        |\n+ * +         +---O==================O---+   + 375\n+ * |         |   I 640x480 stream   I   |   |\n+ * |         |   I                  I   |   |\n+ * |         |   I                  I   |   |\n+ * +         |   I                  I   |   + 750\n+ * |         |   I                  I   |   |\n+ * |         |   I                  I   |   |\n+ * |         |   I                  I   |   |\n+ * +         +---O==================O---+   + 1125\n+ * |          Crop region, 1280x720 stream  |\n+ * |                                        |\n+ * |                                        |\n+ * +---------+---------+---------+----------+ 1500\n+ *\n+ * Crop region: (500, 375, 750, 750) (1:1 aspect ratio)\n+ *\n+ *   640x480 stream crop: (500, 469, 750, 562) (marked with =)\n+ *   1280x720 stream crop: (500, 543, 750, 414) (marged with #)\n+ *\n+ * 0                   1000               2000\n+ * +---------+---------+---------+----------+\n+ * | Active pixel array                     |\n+ * |                                        |\n+ * |                                        |\n+ * +         +--------------+               + 375\n+ * |         O==============O               |\n+ * |         ################               |\n+ * |         #              #               |\n+ * +         #              #               + 750\n+ * |         #              #               |\n+ * |         ################ 1280x720      |\n+ * |         O==============O 640x480       |\n+ * +         +--------------+               + 1125\n+ * |          Crop region                   |\n+ * |                                        |\n+ * |                                        |\n+ * 
+---------+---------+---------+----------+ 1500\n+ *\n+ * And a final example, a 1024x1024 square aspect ratio stream instead of the\n+ * 480p stream:\n+ *\n+ * Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)\n+ *\n+ *   1024x1024 stream crop: (625, 375, 750, 750) (marked with #)\n+ *   1280x720 stream crop: (500, 469, 1000, 562) (marked with =)\n+ *\n+ * 0                   1000               2000\n+ * +---------+---------+---------+----------+\n+ * | Active pixel array                     |\n+ * |                                        |\n+ * |              1024x1024 stream          |\n+ * +         +--###############--+          + 375\n+ * |         |  #             #  |          |\n+ * |         O===================O          |\n+ * |         I 1280x720 stream   I          |\n+ * +         I                   I          + 750\n+ * |         I                   I          |\n+ * |         O===================O          |\n+ * |         |  #             #  |          |\n+ * +         +--###############--+          + 1125\n+ * |          Crop region                   |\n+ * |                                        |\n+ * |                                        |\n+ * +---------+---------+---------+----------+ 1500\n+ *\n+ */\n+\n+/**\n+ * S6. Error management:\n+ *\n+ * Camera HAL device ops functions that have a return value will all return\n+ * -ENODEV / NULL in case of a serious error. This means the device cannot\n+ * continue operation, and must be closed by the framework. Once this error is\n+ * returned by some method, or if notify() is called with ERROR_DEVICE, only\n+ * the close() method can be called successfully. 
All other methods will return\n+ * -ENODEV / NULL.\n+ *\n+ * If a device op is called in the wrong sequence, for example if the framework\n+ * calls configure_streams() before initialize(), the device must\n+ * return -ENOSYS from the call, and do nothing.\n+ *\n+ * Transient errors in image capture must be reported through notify() as follows:\n+ *\n+ * - The failure of an entire capture to occur must be reported by the HAL by\n+ *   calling notify() with ERROR_REQUEST. Individual errors for the result\n+ *   metadata or the output buffers must not be reported in this case.\n+ *\n+ * - If the metadata for a capture cannot be produced, but some image buffers\n+ *   were filled, the HAL must call notify() with ERROR_RESULT.\n+ *\n+ * - If an output image buffer could not be filled, but either the metadata was\n+ *   produced or some other buffers were filled, the HAL must call notify() with\n+ *   ERROR_BUFFER for each failed buffer.\n+ *\n+ * In each of these transient failure cases, the HAL must still call\n+ * process_capture_result, with valid output and input (if an input buffer was\n+ * submitted) buffer_handle_t. If the result metadata could not be produced, it\n+ * should be NULL. If some buffers could not be filled, they must be returned with\n+ * process_capture_result in the error state, their release fences must be set to\n+ * the acquire fences passed by the framework, or -1 if they have been waited on by\n+ * the HAL already.\n+ *\n+ * Invalid input arguments result in -EINVAL from the appropriate methods. In\n+ * that case, the framework must act as if that call had never been made.\n+ *\n+ */\n+\n+/**\n+ * S7. 
Key Performance Indicator (KPI) glossary:\n+ *\n+ * This includes some critical definitions that are used by KPI metrics.\n+ *\n+ * Pipeline Latency:\n+ *  For a given capture request, the duration from the framework calling\n+ *  process_capture_request to the HAL sending capture result and all buffers\n+ *  back by process_capture_result call. To make the Pipeline Latency measure\n+ *  independent of frame rate, it is measured by frame count.\n+ *\n+ *  For example, when frame rate is 30 (fps), the frame duration (time interval\n+ *  between adjacent frame capture time) is 33 (ms).\n+ *  If it takes 5 frames for framework to get the result and buffers back for\n+ *  a given request, then the Pipeline Latency is 5 (frames), instead of\n+ *  5 x 33 = 165 (ms).\n+ *\n+ *  The Pipeline Latency is determined by android.request.pipelineDepth and\n+ *  android.request.pipelineMaxDepth, see their definitions for more details.\n+ *\n+ */\n+\n+/**\n+ * S8. Sample Use Cases:\n+ *\n+ * This includes some typical use case examples the camera HAL may support.\n+ *\n+ * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream.\n+ *\n+ *   For this use case, the bidirectional stream will be used by the framework as follows:\n+ *\n+ *   1. The framework includes a buffer from this stream as output buffer in a\n+ *      request as normal.\n+ *\n+ *   2. Once the HAL device returns a filled output buffer to the framework,\n+ *      the framework may do one of two things with the filled buffer:\n+ *\n+ *   2. a. The framework uses the filled data, and returns the now-used buffer\n+ *         to the stream queue for reuse. This behavior exactly matches the\n+ *         OUTPUT type of stream.\n+ *\n+ *   2. b. The framework wants to reprocess the filled data, and uses the\n+ *         buffer as an input buffer for a request. Once the HAL device has\n+ *         used the reprocessing buffer, it then returns it to the\n+ *         framework. 
The framework then returns the now-used buffer to the\n+ *         stream queue for reuse.\n+ *\n+ *   3. The HAL device will be given the buffer again as an output buffer for\n+ *        a request at some future point.\n+ *\n+ *   For ZSL use case, the pixel format for bidirectional stream will be\n+ *   HAL_PIXEL_FORMAT_RAW_OPAQUE or HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED if it\n+ *   is listed in android.scaler.availableInputOutputFormatsMap. When\n+ *   HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, the gralloc\n+ *   usage flags for the consumer endpoint will be set to GRALLOC_USAGE_HW_CAMERA_ZSL.\n+ *   A configuration stream list that has BIDIRECTIONAL stream used as input, will\n+ *   usually also have a distinct OUTPUT stream to get the reprocessing data. For example,\n+ *   for the ZSL use case, the stream list might be configured with the following:\n+ *\n+ *     - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream is used\n+ *       as input.\n+ *     - And a HAL_PIXEL_FORMAT_BLOB (JPEG) output stream.\n+ *\n+ * S8.2 ZSL (OPAQUE) reprocessing with CAMERA3_STREAM_INPUT stream.\n+ *\n+ * CAMERA_DEVICE_API_VERSION_3_3:\n+ *   When OPAQUE_REPROCESSING capability is supported by the camera device, the INPUT stream\n+ *   can be used for application/framework implemented use case like Zero Shutter Lag (ZSL).\n+ *   This kind of stream will be used by the framework as follows:\n+ *\n+ *   1. Application/framework configures an opaque (RAW or YUV based) format output stream that is\n+ *      used to produce the ZSL output buffers. The stream pixel format will be\n+ *      HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.\n+ *\n+ *   2. Application/framework configures an opaque format input stream that is used to\n+ *      send the reprocessing ZSL buffers to the HAL. The stream pixel format will\n+ *      also be HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED.\n+ *\n+ *   3. 
Application/framework configures a YUV/JPEG output stream that is used to receive the\n+ *      reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.\n+ *\n+ *   4. Application/framework picks a ZSL buffer from the ZSL output stream when a ZSL capture is\n+ *      issued by the application, and sends the data back as an input buffer in a\n+ *      reprocessing request, then sends to the HAL for reprocessing.\n+ *\n+ *   5. The HAL sends back the output YUV/JPEG result to framework.\n+ *\n+ *   The HAL can select the actual opaque buffer format and configure the ISP pipeline\n+ *   appropriately based on the HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format and\n+ *   the gralloc usage flag GRALLOC_USAGE_HW_CAMERA_ZSL.\n+\n+ * S8.3 YUV reprocessing with CAMERA3_STREAM_INPUT stream.\n+ *\n+ *   When YUV reprocessing is supported by the HAL, the INPUT stream\n+ *   can be used for the YUV reprocessing use cases like lucky-shot and image fusion.\n+ *   This kind of stream will be used by the framework as follows:\n+ *\n+ *   1. Application/framework configures an YCbCr_420 format output stream that is\n+ *      used to produce the output buffers.\n+ *\n+ *   2. Application/framework configures an YCbCr_420 format input stream that is used to\n+ *      send the reprocessing YUV buffers to the HAL.\n+ *\n+ *   3. Application/framework configures a YUV/JPEG output stream that is used to receive the\n+ *      reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.\n+ *\n+ *   4. Application/framework processes the output buffers (could be as simple as picking\n+ *      an output buffer directly) from the output stream when a capture is issued, and sends\n+ *      the data back as an input buffer in a reprocessing request, then sends to the HAL\n+ *      for reprocessing.\n+ *\n+ *   5. The HAL sends back the output YUV/JPEG result to framework.\n+ *\n+ */\n+\n+/**\n+ *   S9. 
Notes on Controls and Metadata\n+ *\n+ *   This section contains notes about the interpretation and usage of various metadata tags.\n+ *\n+ *   S9.1 HIGH_QUALITY and FAST modes.\n+ *\n+ *   Many camera post-processing blocks may be listed as having HIGH_QUALITY,\n+ *   FAST, and OFF operating modes. These blocks will typically also have an\n+ *   'available modes' tag representing which of these operating modes are\n+ *   available on a given device. The general policy regarding implementing\n+ *   these modes is as follows:\n+ *\n+ *   1. Operating mode controls of hardware blocks that cannot be disabled\n+ *      must not list OFF in their corresponding 'available modes' tags.\n+ *\n+ *   2. OFF will always be included in their corresponding 'available modes'\n+ *      tag if it is possible to disable that hardware block.\n+ *\n+ *   3. FAST must always be included in the 'available modes' tags for all\n+ *      post-processing blocks supported on the device.  If a post-processing\n+ *      block also has a slower and higher quality operating mode that does\n+ *      not meet the framerate requirements for FAST mode, HIGH_QUALITY should\n+ *      be included in the 'available modes' tag to represent this operating\n+ *      mode.\n+ */\n+\n+/**\n+ *   S10. Reprocessing flow and controls\n+ *\n+ *   This section describes the OPAQUE and YUV reprocessing flow and controls. OPAQUE reprocessing\n+ *   uses an opaque format that is not directly application-visible, and the application can\n+ *   only select some of the output buffers and send back to HAL for reprocessing, while YUV\n+ *   reprocessing gives the application opportunity to process the buffers before reprocessing.\n+ *\n+ *   S8 gives the stream configurations for the typical reprocessing uses cases,\n+ *   this section specifies the buffer flow and controls in more details.\n+ *\n+ *   S10.1 OPAQUE (typically for ZSL use case) reprocessing flow and controls\n+ *\n+ *   For OPAQUE reprocessing (e.g. 
ZSL) use case, after the application creates the specific\n+ *   output and input streams, runtime buffer flow and controls are specified as below:\n+ *\n+ *   1. Application starts output streaming by sending repeating requests for output\n+ *      opaque buffers and preview. The buffers are held by an application\n+ *      maintained circular buffer. The requests are based on CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG\n+ *      capture template, which should have all necessary settings that guarantee output\n+ *      frame rate is not slowed down relative to sensor output frame rate.\n+ *\n+ *   2. When a capture is issued, the application selects one output buffer based\n+ *      on application buffer selection logic, e.g. good AE and AF statistics etc.\n+ *      Application then creates an reprocess request based on the capture result associated\n+ *      with this selected buffer. The selected output buffer is now added to this reprocess\n+ *      request as an input buffer, the output buffer of this reprocess request should be\n+ *      either JPEG output buffer or YUV output buffer, or both, depending on the application\n+ *      choice.\n+ *\n+ *   3. Application then alters the reprocess settings to get best image quality. The HAL must\n+ *      support and only support below controls if the HAL support OPAQUE_REPROCESSING capability:\n+ *          - android.jpeg.* (if JPEG buffer is included as one of the output)\n+ *          - android.noiseReduction.mode (change to HIGH_QUALITY if it is supported)\n+ *          - android.edge.mode (change to HIGH_QUALITY if it is supported)\n+ *       All other controls must be ignored by the HAL.\n+ *   4. HAL processed the input buffer and return the output buffers in the capture results\n+ *      as normal.\n+ *\n+ *   S10.2 YUV reprocessing flow and controls\n+ *\n+ *   The YUV reprocessing buffer flow is similar as OPAQUE reprocessing, with below difference:\n+ *\n+ *   1. 
Application may want to have finer granularity control of the intermediate YUV images\n+ *      (before reprocessing). For example, application may choose\n+ *          - android.noiseReduction.mode == MINIMAL\n+ *      to make sure the no YUV domain noise reduction has applied to the output YUV buffers,\n+ *      then it can do its own advanced noise reduction on them. For OPAQUE reprocessing case, this\n+ *      doesn't matter, as long as the final reprocessed image has the best quality.\n+ *   2. Application may modify the YUV output buffer data. For example, for image fusion use\n+ *      case, where multiple output images are merged together to improve the signal-to-noise\n+ *      ratio (SNR). The input buffer may be generated from multiple buffers by the application.\n+ *      To avoid excessive amount of noise reduction and insufficient amount of edge enhancement\n+ *      being applied to the input buffer, the application can hint the HAL  how much effective\n+ *      exposure time improvement has been done by the application, then the HAL can adjust the\n+ *      noise reduction and edge enhancement paramters to get best reprocessed image quality.\n+ *      Below tag can be used for this purpose:\n+ *          - android.reprocess.effectiveExposureFactor\n+ *      The value would be exposure time increase factor applied to the original output image,\n+ *      for example, if there are N image merged, the exposure time increase factor would be up\n+ *      to sqrt(N). See this tag spec for more details.\n+ *\n+ *   S10.3 Reprocessing pipeline characteristics\n+ *\n+ *   Reprocessing pipeline has below different characteristics comparing with normal output\n+ *   pipeline:\n+ *\n+ *   1. The reprocessing result can be returned ahead of the pending normal output results. But\n+ *      the FIFO ordering must be maintained for all reprocessing results. 
For example, there are\n+ *      below requests (A stands for output requests, B stands for reprocessing requests)\n+ *      being processed by the HAL:\n+ *          A1, A2, A3, A4, B1, A5, B2, A6...\n+ *      result of B1 can be returned before A1-A4, but result of B2 must be returned after B1.\n+ *   2. Single input rule: For a given reprocessing request, all output buffers must be from the\n+ *      input buffer, rather than sensor output. For example, if a reprocess request include both\n+ *      JPEG and preview buffers, all output buffers must be produced from the input buffer\n+ *      included by the reprocessing request, rather than sensor. The HAL must not output preview\n+ *      buffers from sensor, while output JPEG buffer from the input buffer.\n+ *   3. Input buffer will be from camera output directly (ZSL case) or indirectly(image fusion\n+ *      case). For the case where buffer is modified, the size will remain same. The HAL can\n+ *      notify CAMERA3_MSG_ERROR_REQUEST if buffer from unknown source is sent.\n+ *   4. Result as reprocessing request: The HAL can expect that a reprocessing request is a copy\n+ *      of one of the output results with minor allowed setting changes. The HAL can notify\n+ *      CAMERA3_MSG_ERROR_REQUEST if a request from unknown source is issued.\n+ *   5. Output buffers may not be used as inputs across the configure stream boundary, This is\n+ *      because an opaque stream like the ZSL output stream may have different actual image size\n+ *      inside of the ZSL buffer to save power and bandwidth for smaller resolution JPEG capture.\n+ *      The HAL may notify CAMERA3_MSG_ERROR_REQUEST if this case occurs.\n+ *   6. 
HAL Reprocess requests error reporting during flush should follow the same rule specified\n+ *      by flush() method.\n+ *\n+ */\n+\n+__BEGIN_DECLS\n+\n+struct camera3_device;\n+\n+/**********************************************************************\n+ *\n+ * Camera3 stream and stream buffer definitions.\n+ *\n+ * These structs and enums define the handles and contents of the input and\n+ * output streams connecting the HAL to various framework and application buffer\n+ * consumers. Each stream is backed by a gralloc buffer queue.\n+ *\n+ */\n+\n+/**\n+ * camera3_stream_type_t:\n+ *\n+ * The type of the camera stream, which defines whether the camera HAL device is\n+ * the producer or the consumer for that stream, and how the buffers of the\n+ * stream relate to the other streams.\n+ */\n+typedef enum camera3_stream_type {\n+    /**\n+     * This stream is an output stream; the camera HAL device will be\n+     * responsible for filling buffers from this stream with newly captured or\n+     * reprocessed image data.\n+     */\n+    CAMERA3_STREAM_OUTPUT = 0,\n+\n+    /**\n+     * This stream is an input stream; the camera HAL device will be responsible\n+     * for reading buffers from this stream and sending them through the camera\n+     * processing pipeline, as if the buffer was a newly captured image from the\n+     * imager.\n+     *\n+     * The pixel format for input stream can be any format reported by\n+     * android.scaler.availableInputOutputFormatsMap. The pixel format of the\n+     * output stream that is used to produce the reprocessing data may be any\n+     * format reported by android.scaler.availableStreamConfigurations. 
The\n+     * supported input/output stream combinations depends the camera device\n+     * capabilities, see android.scaler.availableInputOutputFormatsMap for\n+     * stream map details.\n+     *\n+     * This kind of stream is generally used to reprocess data into higher\n+     * quality images (that otherwise would cause a frame rate performance\n+     * loss), or to do off-line reprocessing.\n+     *\n+     * CAMERA_DEVICE_API_VERSION_3_3:\n+     *    The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing,\n+     *    see S8.2, S8.3 and S10 for more details.\n+     */\n+    CAMERA3_STREAM_INPUT = 1,\n+\n+    /**\n+     * This stream can be used for input and output. Typically, the stream is\n+     * used as an output stream, but occasionally one already-filled buffer may\n+     * be sent back to the HAL device for reprocessing.\n+     *\n+     * This kind of stream is meant generally for Zero Shutter Lag (ZSL)\n+     * features, where copying the captured image from the output buffer to the\n+     * reprocessing input buffer would be expensive. 
See S8.1 for more details.\n+     *\n+     * Note that the HAL will always be reprocessing data it produced.\n+     *\n+     */\n+    CAMERA3_STREAM_BIDIRECTIONAL = 2,\n+\n+    /**\n+     * Total number of framework-defined stream types\n+     */\n+    CAMERA3_NUM_STREAM_TYPES\n+\n+} camera3_stream_type_t;\n+\n+/**\n+ * camera3_stream_rotation_t:\n+ *\n+ * The required counterclockwise rotation of camera stream.\n+ */\n+typedef enum camera3_stream_rotation {\n+    /* No rotation */\n+    CAMERA3_STREAM_ROTATION_0 = 0,\n+\n+    /* Rotate by 90 degree counterclockwise */\n+    CAMERA3_STREAM_ROTATION_90 = 1,\n+\n+    /* Rotate by 180 degree counterclockwise */\n+    CAMERA3_STREAM_ROTATION_180 = 2,\n+\n+    /* Rotate by 270 degree counterclockwise */\n+    CAMERA3_STREAM_ROTATION_270 = 3\n+} camera3_stream_rotation_t;\n+\n+/**\n+ * camera3_stream_configuration_mode_t:\n+ *\n+ * This defines the general operation mode for the HAL (for a given stream configuration), where\n+ * modes besides NORMAL have different semantics, and usually limit the generality of the API in\n+ * exchange for higher performance in some particular area.\n+ */\n+typedef enum camera3_stream_configuration_mode {\n+    /**\n+     * Normal stream configuration operation mode. This is the default camera operation mode,\n+     * where all semantics of HAL APIs and metadata controls apply.\n+     */\n+    CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE = 0,\n+\n+    /**\n+     * Special constrained high speed operation mode for devices that can not support high\n+     * speed output in NORMAL mode. All streams in this configuration are operating at high speed\n+     * mode and have different characteristics and limitations to achieve high speed output.\n+     * The NORMAL mode can still be used for high speed output if the HAL can support high speed\n+     * output while satisfying all the semantics of HAL APIs and metadata controls. 
It is\n+     * recommended for the HAL to support high speed output in NORMAL mode (by advertising the high\n+     * speed FPS ranges in android.control.aeAvailableTargetFpsRanges) if possible.\n+     *\n+     * This mode has below limitations/requirements:\n+     *\n+     *   1. The HAL must support up to 2 streams with sizes reported by\n+     *      android.control.availableHighSpeedVideoConfigurations.\n+     *   2. In this mode, the HAL is expected to output up to 120fps or higher. This mode must\n+     *      support the targeted FPS range and size configurations reported by\n+     *      android.control.availableHighSpeedVideoConfigurations.\n+     *   3. The HAL must support HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED output stream format.\n+     *   4. To achieve efficient high speed streaming, the HAL may have to aggregate\n+     *      multiple frames together and send to camera device for processing where the request\n+     *      controls are same for all the frames in this batch (batch mode). The HAL must support\n+     *      max batch size and the max batch size requirements defined by\n+     *      android.control.availableHighSpeedVideoConfigurations.\n+     *   5. In this mode, the HAL must override aeMode, awbMode, and afMode to ON, ON, and\n+     *      CONTINUOUS_VIDEO, respectively. All post-processing block mode controls must be\n+     *      overridden to be FAST. Therefore, no manual control of capture and post-processing\n+     *      parameters is possible. All other controls operate the same as when\n+     *      android.control.mode == AUTO. 
This means that all other android.control.* fields\n+     *      must continue to work, such as\n+     *\n+     *      android.control.aeTargetFpsRange\n+     *      android.control.aeExposureCompensation\n+     *      android.control.aeLock\n+     *      android.control.awbLock\n+     *      android.control.effectMode\n+     *      android.control.aeRegions\n+     *      android.control.afRegions\n+     *      android.control.awbRegions\n+     *      android.control.afTrigger\n+     *      android.control.aePrecaptureTrigger\n+     *\n+     *      Outside of android.control.*, the following controls must work:\n+     *\n+     *      android.flash.mode (TORCH mode only, automatic flash for still capture will not work\n+     *      since aeMode is ON)\n+     *      android.lens.opticalStabilizationMode (if it is supported)\n+     *      android.scaler.cropRegion\n+     *      android.statistics.faceDetectMode (if it is supported)\n+     *\n+     * For more details about high speed stream requirements, see\n+     * android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO\n+     * capability defined in android.request.availableCapabilities.\n+     *\n+     * This mode only needs to be supported by HALs that include CONSTRAINED_HIGH_SPEED_VIDEO in\n+     * the android.request.availableCapabilities static metadata.\n+     */\n+    CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE = 1,\n+\n+    /**\n+     * First value for vendor-defined stream configuration modes.\n+     */\n+    CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START = 0x8000\n+} camera3_stream_configuration_mode_t;\n+\n+/**\n+ * camera3_stream_t:\n+ *\n+ * A handle to a single camera input or output stream. 
A stream is defined by\n+ * the framework by its buffer resolution and format, and additionally by the\n+ * HAL with the gralloc usage flags and the maximum in-flight buffer count.\n+ *\n+ * The stream structures are owned by the framework, but pointers to a\n+ * camera3_stream passed into the HAL by configure_streams() are valid until the\n+ * end of the first subsequent configure_streams() call that _does not_ include\n+ * that camera3_stream as an argument, or until the end of the close() call.\n+ *\n+ * All camera3_stream framework-controlled members are immutable once the\n+ * camera3_stream is passed into configure_streams().  The HAL may only change\n+ * the HAL-controlled parameters during a configure_streams() call, except for\n+ * the contents of the private pointer.\n+ *\n+ * If a configure_streams() call returns a non-fatal error, all active streams\n+ * remain valid as if configure_streams() had not been called.\n+ *\n+ * The endpoint of the stream is not visible to the camera HAL device.\n+ * In DEVICE_API_VERSION_3_1, this was changed to share consumer usage flags\n+ * on streams where the camera is a producer (OUTPUT and BIDIRECTIONAL stream\n+ * types) see the usage field below.\n+ */\n+typedef struct camera3_stream {\n+\n+    /*****\n+     * Set by framework before configure_streams()\n+     */\n+\n+    /**\n+     * The type of the stream, one of the camera3_stream_type_t values.\n+     */\n+    int stream_type;\n+\n+    /**\n+     * The width in pixels of the buffers in this stream\n+     */\n+    uint32_t width;\n+\n+    /**\n+     * The height in pixels of the buffers in this stream\n+     */\n+    uint32_t height;\n+\n+    /**\n+     * The pixel format for the buffers in this stream. 
Format is a value from\n+     * the HAL_PIXEL_FORMAT_* list in system/core/include/system/graphics.h, or\n+     * from device-specific headers.\n+     *\n+     * If HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform\n+     * gralloc module will select a format based on the usage flags provided by\n+     * the camera device and the other endpoint of the stream.\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     * The camera HAL device must inspect the buffers handed to it in the\n+     * subsequent register_stream_buffers() call to obtain the\n+     * implementation-specific format details, if necessary.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * register_stream_buffers() won't be called by the framework, so the HAL\n+     * should configure the ISP and sensor pipeline based purely on the sizes,\n+     * usage flags, and formats for the configured streams.\n+     */\n+    int format;\n+\n+    /*****\n+     * Set by HAL during configure_streams().\n+     */\n+\n+    /**\n+     * The gralloc usage flags for this stream, as needed by the HAL. The usage\n+     * flags are defined in gralloc.h (GRALLOC_USAGE_*), or in device-specific\n+     * headers.\n+     *\n+     * For output streams, these are the HAL's producer usage flags. For input\n+     * streams, these are the HAL's consumer usage flags. 
The usage flags from\n+     * the producer and the consumer will be combined together and then passed\n+     * to the platform gralloc HAL module for allocating the gralloc buffers for\n+     * each stream.\n+     *\n+     * Version information:\n+     *\n+     * == CAMERA_DEVICE_API_VERSION_3_0:\n+     *\n+     *   No initial value guaranteed when passed via configure_streams().\n+     *   HAL may not use this field as input, and must write over this field\n+     *   with its usage flags.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     *   For stream_type OUTPUT and BIDIRECTIONAL, when passed via\n+     *   configure_streams(), the initial value of this is the consumer's\n+     *   usage flags.  The HAL may use these consumer flags to decide stream\n+     *   configuration.\n+     *   For stream_type INPUT, when passed via configure_streams(), the initial\n+     *   value of this is 0.\n+     *   For all streams passed via configure_streams(), the HAL must write\n+     *   over this field with its usage flags.\n+     */\n+    uint32_t usage;\n+\n+    /**\n+     * The maximum number of buffers the HAL device may need to have dequeued at\n+     * the same time. The HAL device may not have more buffers in-flight from\n+     * this stream than this value.\n+     */\n+    uint32_t max_buffers;\n+\n+    /**\n+     * A handle to HAL-private information for the stream. Will not be inspected\n+     * by the framework code.\n+     */\n+    void *priv;\n+\n+    /**\n+     * A field that describes the contents of the buffer. The format and buffer\n+     * dimensions define the memory layout and structure of the stream buffers,\n+     * while dataSpace defines the meaning of the data within the buffer.\n+     *\n+     * For most formats, dataSpace defines the color space of the image data.\n+     * In addition, for some formats, dataSpace indicates whether image- or\n+     * depth-based data is requested.  
See system/core/include/system/graphics.h\n+     * for details of formats and valid dataSpace values for each format.\n+     *\n+     * Version information:\n+     *\n+     * < CAMERA_DEVICE_API_VERSION_3_3:\n+     *\n+     *   Not defined and should not be accessed. dataSpace should be assumed to\n+     *   be HAL_DATASPACE_UNKNOWN, and the appropriate color space, etc, should\n+     *   be determined from the usage flags and the format.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_3:\n+     *\n+     *   Always set by the camera service. HAL must use this dataSpace to\n+     *   configure the stream to the correct colorspace, or to select between\n+     *   color and depth outputs if supported.\n+     */\n+    android_dataspace_t data_space;\n+\n+    /**\n+     * The required output rotation of the stream, one of\n+     * the camera3_stream_rotation_t values. This must be inspected by HAL along\n+     * with stream width and height. For example, if the rotation is 90 degree\n+     * and the stream width and height is 720 and 1280 respectively, camera service\n+     * will supply buffers of size 720x1280, and HAL should capture a 1280x720 image\n+     * and rotate the image by 90 degree counterclockwise. The rotation field is\n+     * no-op when the stream type is input. Camera HAL must ignore the rotation\n+     * field for an input stream.\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     *    Not defined and must not be accessed. HAL must not apply any rotation\n+     *    on output images.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_3:\n+     *\n+     *    Always set by camera service. 
HAL must inspect this field during stream\n+     *    configuration and returns -EINVAL if HAL cannot perform such rotation.\n+     *    HAL must always support CAMERA3_STREAM_ROTATION_0, so a\n+     *    configure_streams() call must not fail for unsupported rotation if\n+     *    rotation field of all streams is CAMERA3_STREAM_ROTATION_0.\n+     *\n+     */\n+    int rotation;\n+\n+    /**\n+     * This should be one of the camera3_stream_rotation_t values except for\n+     * CAMERA3_STREAM_ROTATION_180.\n+     * When setting to CAMERA3_STREAM_ROTATION_90 or CAMERA3_STREAM_ROTATION_270, HAL would crop,\n+     * rotate the frame by the specified degrees clockwise and scale it up to original size.\n+     * In Chrome OS, it's possible to have a portrait activity run in a landscape screen with\n+     * landscape-mounted camera. The activity would show stretched or rotated preview because it\n+     * does not expect to receive landscape preview frames. To solve this problem, we ask HAL to\n+     * crop, rotate and scale the frames and modify CameraCharacteristics.SENSOR_ORIENTATION\n+     * accordingly to imitate a portrait camera.\n+     * Setting it to CAMERA3_STREAM_ROTATION_0 means no crop-rotate-scale would be performed.\n+     * |cros_rotate_scale_degrees| in all camera3_stream_t of a configure_streams() call must be\n+     * identical. The HAL should return -EINVAL if the degrees are not the same for all the streams.\n+     */\n+    int crop_rotate_scale_degrees;\n+\n+    /* reserved for future use */\n+    void *reserved[6];\n+\n+} camera3_stream_t;\n+\n+/**\n+ * camera3_stream_configuration_t:\n+ *\n+ * A structure of stream definitions, used by configure_streams(). This\n+ * structure defines all the output streams and the reprocessing input\n+ * stream for the current camera use case.\n+ */\n+typedef struct camera3_stream_configuration {\n+    /**\n+     * The total number of streams requested by the framework.  
This includes\n+     * both input and output streams. The number of streams will be at least 1,\n+     * and there will be at least one output-capable stream.\n+     */\n+    uint32_t num_streams;\n+\n+    /**\n+     * An array of camera stream pointers, defining the input/output\n+     * configuration for the camera HAL device.\n+     *\n+     * At most one input-capable stream may be defined (INPUT or BIDIRECTIONAL)\n+     * in a single configuration.\n+     *\n+     * At least one output-capable stream must be defined (OUTPUT or\n+     * BIDIRECTIONAL).\n+     */\n+    camera3_stream_t **streams;\n+\n+    /**\n+     * >= CAMERA_DEVICE_API_VERSION_3_3:\n+     *\n+     * The operation mode of streams in this configuration, one of the value defined in\n+     * camera3_stream_configuration_mode_t.\n+     * The HAL can use this mode as an indicator to set the stream property (e.g.,\n+     * camera3_stream->max_buffers) appropriately. For example, if the configuration is\n+     * CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE, the HAL may want to set aside more\n+     * buffers for batch mode operation (see android.control.availableHighSpeedVideoConfigurations\n+     * for batch mode definition).\n+     *\n+     */\n+    uint32_t operation_mode;\n+} camera3_stream_configuration_t;\n+\n+/**\n+ * camera3_buffer_status_t:\n+ *\n+ * The current status of a single stream buffer.\n+ */\n+typedef enum camera3_buffer_status {\n+    /**\n+     * The buffer is in a normal state, and can be used after waiting on its\n+     * sync fence.\n+     */\n+    CAMERA3_BUFFER_STATUS_OK = 0,\n+\n+    /**\n+     * The buffer does not contain valid data, and the data in it should not be\n+     * used. The sync fence must still be waited on before reusing the buffer.\n+     */\n+    CAMERA3_BUFFER_STATUS_ERROR = 1\n+\n+} camera3_buffer_status_t;\n+\n+/**\n+ * camera3_stream_buffer_t:\n+ *\n+ * A single buffer from a camera3 stream. 
It includes a handle to its parent\n+ * stream, the handle to the gralloc buffer itself, and sync fences\n+ *\n+ * The buffer does not specify whether it is to be used for input or output;\n+ * that is determined by its parent stream type and how the buffer is passed to\n+ * the HAL device.\n+ */\n+typedef struct camera3_stream_buffer {\n+    /**\n+     * The handle of the stream this buffer is associated with\n+     */\n+    camera3_stream_t *stream;\n+\n+    /**\n+     * The native handle to the buffer\n+     */\n+    buffer_handle_t *buffer;\n+\n+    /**\n+     * Current state of the buffer, one of the camera3_buffer_status_t\n+     * values. The framework will not pass buffers to the HAL that are in an\n+     * error state. In case a buffer could not be filled by the HAL, it must\n+     * have its status set to CAMERA3_BUFFER_STATUS_ERROR when returned to the\n+     * framework with process_capture_result().\n+     */\n+    int status;\n+\n+    /**\n+     * The acquire sync fence for this buffer. The HAL must wait on this fence\n+     * fd before attempting to read from or write to this buffer.\n+     *\n+     * The framework may be set to -1 to indicate that no waiting is necessary\n+     * for this buffer.\n+     *\n+     * When the HAL returns an output buffer to the framework with\n+     * process_capture_result(), the acquire_fence must be set to -1. If the HAL\n+     * never waits on the acquire_fence due to an error in filling a buffer,\n+     * when calling process_capture_result() the HAL must set the release_fence\n+     * of the buffer to be the acquire_fence passed to it by the framework. 
This\n+     * will allow the framework to wait on the fence before reusing the buffer.\n+     *\n+     * For input buffers, the HAL must not change the acquire_fence field during\n+     * the process_capture_request() call.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * When the HAL returns an input buffer to the framework with\n+     * process_capture_result(), the acquire_fence must be set to -1. If the HAL\n+     * never waits on input buffer acquire fence due to an error, the sync\n+     * fences should be handled similarly to the way they are handled for output\n+     * buffers.\n+     */\n+     int acquire_fence;\n+\n+    /**\n+     * The release sync fence for this buffer. The HAL must set this fence when\n+     * returning buffers to the framework, or write -1 to indicate that no\n+     * waiting is required for this buffer.\n+     *\n+     * For the output buffers, the fences must be set in the output_buffers\n+     * array passed to process_capture_result().\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     * For the input buffer, the release fence must be set by the\n+     * process_capture_request() call.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * For the input buffer, the fences must be set in the input_buffer\n+     * passed to process_capture_result().\n+     *\n+     * After signaling the release_fence for this buffer, the HAL\n+     * should not make any further attempts to access this buffer as the\n+     * ownership has been fully transferred back to the framework.\n+     *\n+     * If a fence of -1 was specified then the ownership of this buffer\n+     * is transferred back immediately upon the call of process_capture_result.\n+     */\n+    int release_fence;\n+\n+} camera3_stream_buffer_t;\n+\n+/**\n+ * camera3_stream_buffer_set_t:\n+ *\n+ * The complete set of gralloc buffers for a stream. 
This structure is given to\n+ * register_stream_buffers() to allow the camera HAL device to register/map/etc\n+ * newly allocated stream buffers.\n+ *\n+ * >= CAMERA_DEVICE_API_VERSION_3_2:\n+ *\n+ * Deprecated (and not used). In particular,\n+ * register_stream_buffers is also deprecated and will never be invoked.\n+ *\n+ */\n+typedef struct camera3_stream_buffer_set {\n+    /**\n+     * The stream handle for the stream these buffers belong to\n+     */\n+    camera3_stream_t *stream;\n+\n+    /**\n+     * The number of buffers in this stream. It is guaranteed to be at least\n+     * stream->max_buffers.\n+     */\n+    uint32_t num_buffers;\n+\n+    /**\n+     * The array of gralloc buffer handles for this stream. If the stream format\n+     * is set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL device\n+     * should inspect the passed-in buffers to determine any platform-private\n+     * pixel format information.\n+     */\n+    buffer_handle_t **buffers;\n+\n+} camera3_stream_buffer_set_t;\n+\n+/**\n+ * camera3_jpeg_blob:\n+ *\n+ * Transport header for compressed JPEG buffers in output streams.\n+ *\n+ * To capture JPEG images, a stream is created using the pixel format\n+ * HAL_PIXEL_FORMAT_BLOB. The buffer size for the stream is calculated by the\n+ * framework, based on the static metadata field android.jpeg.maxSize. Since\n+ * compressed JPEG images are of variable size, the HAL needs to include the\n+ * final size of the compressed image using this structure inside the output\n+ * stream buffer. The JPEG blob ID field must be set to CAMERA3_JPEG_BLOB_ID.\n+ *\n+ * Transport header should be at the end of the JPEG output stream buffer. 
That\n+ * means the jpeg_blob_id must start at byte[buffer_size -\n+ * sizeof(camera3_jpeg_blob)], where the buffer_size is the size of gralloc buffer.\n+ * Any HAL using this transport header must account for it in android.jpeg.maxSize\n+ * The JPEG data itself starts at the beginning of the buffer and should be\n+ * jpeg_size bytes long.\n+ */\n+typedef struct camera3_jpeg_blob {\n+    uint16_t jpeg_blob_id;\n+    uint32_t jpeg_size;\n+} camera3_jpeg_blob_t;\n+\n+enum {\n+    CAMERA3_JPEG_BLOB_ID = 0x00FF\n+};\n+\n+/**********************************************************************\n+ *\n+ * Message definitions for the HAL notify() callback.\n+ *\n+ * These definitions are used for the HAL notify callback, to signal\n+ * asynchronous events from the HAL device to the Android framework.\n+ *\n+ */\n+\n+/**\n+ * camera3_msg_type:\n+ *\n+ * Indicates the type of message sent, which specifies which member of the\n+ * message union is valid.\n+ *\n+ */\n+typedef enum camera3_msg_type {\n+    /**\n+     * An error has occurred. camera3_notify_msg.message.error contains the\n+     * error information.\n+     */\n+    CAMERA3_MSG_ERROR = 1,\n+\n+    /**\n+     * The exposure of a given request or processing a reprocess request has\n+     * begun. camera3_notify_msg.message.shutter contains the information\n+     * the capture.\n+     */\n+    CAMERA3_MSG_SHUTTER = 2,\n+\n+    /**\n+     * Number of framework message types\n+     */\n+    CAMERA3_NUM_MESSAGES\n+\n+} camera3_msg_type_t;\n+\n+/**\n+ * Defined error codes for CAMERA_MSG_ERROR\n+ */\n+typedef enum camera3_error_msg_code {\n+    /**\n+     * A serious failure occured. No further frames or buffer streams will\n+     * be produced by the device. Device should be treated as closed. The\n+     * client must reopen the device to use it again. The frame_number field\n+     * is unused.\n+     */\n+    CAMERA3_MSG_ERROR_DEVICE = 1,\n+\n+    /**\n+     * An error has occurred in processing a request. 
No output (metadata or\n+     * buffers) will be produced for this request. The frame_number field\n+     * specifies which request has been dropped. Subsequent requests are\n+     * unaffected, and the device remains operational.\n+     */\n+    CAMERA3_MSG_ERROR_REQUEST = 2,\n+\n+    /**\n+     * An error has occurred in producing an output result metadata buffer\n+     * for a request, but output stream buffers for it will still be\n+     * available. Subsequent requests are unaffected, and the device remains\n+     * operational.  The frame_number field specifies the request for which\n+     * result metadata won't be available.\n+     */\n+    CAMERA3_MSG_ERROR_RESULT = 3,\n+\n+    /**\n+     * An error has occurred in placing an output buffer into a stream for a\n+     * request. The frame metadata and other buffers may still be\n+     * available. Subsequent requests are unaffected, and the device remains\n+     * operational. The frame_number field specifies the request for which the\n+     * buffer was dropped, and error_stream contains a pointer to the stream\n+     * that dropped the frame.u\n+     */\n+    CAMERA3_MSG_ERROR_BUFFER = 4,\n+\n+    /**\n+     * Number of error types\n+     */\n+    CAMERA3_MSG_NUM_ERRORS\n+\n+} camera3_error_msg_code_t;\n+\n+/**\n+ * camera3_error_msg_t:\n+ *\n+ * Message contents for CAMERA3_MSG_ERROR\n+ */\n+typedef struct camera3_error_msg {\n+    /**\n+     * Frame number of the request the error applies to. 0 if the frame number\n+     * isn't applicable to the error.\n+     */\n+    uint32_t frame_number;\n+\n+    /**\n+     * Pointer to the stream that had a failure. 
NULL if the stream isn't\n+     * applicable to the error.\n+     */\n+    camera3_stream_t *error_stream;\n+\n+    /**\n+     * The code for this error; one of the CAMERA_MSG_ERROR enum values.\n+     */\n+    int error_code;\n+\n+} camera3_error_msg_t;\n+\n+/**\n+ * camera3_shutter_msg_t:\n+ *\n+ * Message contents for CAMERA3_MSG_SHUTTER\n+ */\n+typedef struct camera3_shutter_msg {\n+    /**\n+     * Frame number of the request that has begun exposure or reprocessing.\n+     */\n+    uint32_t frame_number;\n+\n+    /**\n+     * Timestamp for the start of capture. For a reprocess request, this must\n+     * be input image's start of capture. This must match the capture result\n+     * metadata's sensor exposure start timestamp.\n+     */\n+    uint64_t timestamp;\n+\n+} camera3_shutter_msg_t;\n+\n+/**\n+ * camera3_notify_msg_t:\n+ *\n+ * The message structure sent to camera3_callback_ops_t.notify()\n+ */\n+typedef struct camera3_notify_msg {\n+\n+    /**\n+     * The message type. One of camera3_notify_msg_type, or a private extension.\n+     */\n+    int type;\n+\n+    union {\n+        /**\n+         * Error message contents. Valid if type is CAMERA3_MSG_ERROR\n+         */\n+        camera3_error_msg_t error;\n+\n+        /**\n+         * Shutter message contents. Valid if type is CAMERA3_MSG_SHUTTER\n+         */\n+        camera3_shutter_msg_t shutter;\n+\n+        /**\n+         * Generic message contents. 
Used to ensure a minimum size for custom\n+         * message types.\n+         */\n+        uint8_t generic[32];\n+    } message;\n+\n+} camera3_notify_msg_t;\n+\n+/**********************************************************************\n+ *\n+ * Capture request/result definitions for the HAL process_capture_request()\n+ * method, and the process_capture_result() callback.\n+ *\n+ */\n+\n+/**\n+ * camera3_request_template_t:\n+ *\n+ * Available template types for\n+ * camera3_device_ops.construct_default_request_settings()\n+ */\n+typedef enum camera3_request_template {\n+    /**\n+     * Standard camera preview operation with 3A on auto.\n+     */\n+    CAMERA3_TEMPLATE_PREVIEW = 1,\n+\n+    /**\n+     * Standard camera high-quality still capture with 3A and flash on auto.\n+     */\n+    CAMERA3_TEMPLATE_STILL_CAPTURE = 2,\n+\n+    /**\n+     * Standard video recording plus preview with 3A on auto, torch off.\n+     */\n+    CAMERA3_TEMPLATE_VIDEO_RECORD = 3,\n+\n+    /**\n+     * High-quality still capture while recording video. Application will\n+     * include preview, video record, and full-resolution YUV or JPEG streams in\n+     * request. Must not cause stuttering on video stream. 3A on auto.\n+     */\n+    CAMERA3_TEMPLATE_VIDEO_SNAPSHOT = 4,\n+\n+    /**\n+     * Zero-shutter-lag mode. Application will request preview and\n+     * full-resolution data for each frame, and reprocess it to JPEG when a\n+     * still image is requested by user. Settings should provide highest-quality\n+     * full-resolution images without compromising preview frame rate. 3A on\n+     * auto.\n+     */\n+    CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG = 5,\n+\n+    /**\n+     * A basic template for direct application control of capture\n+     * parameters. All automatic control is disabled (auto-exposure, auto-white\n+     * balance, auto-focus), and post-processing parameters are set to preview\n+     * quality. 
The manual capture parameters (exposure, sensitivity, etc.)\n+     * are set to reasonable defaults, but should be overridden by the\n+     * application depending on the intended use case.\n+     */\n+    CAMERA3_TEMPLATE_MANUAL = 6,\n+\n+    /* Total number of templates */\n+    CAMERA3_TEMPLATE_COUNT,\n+\n+    /**\n+     * First value for vendor-defined request templates\n+     */\n+    CAMERA3_VENDOR_TEMPLATE_START = 0x40000000\n+\n+} camera3_request_template_t;\n+\n+/**\n+ * camera3_capture_request_t:\n+ *\n+ * A single request for image capture/buffer reprocessing, sent to the Camera\n+ * HAL device by the framework in process_capture_request().\n+ *\n+ * The request contains the settings to be used for this capture, and the set of\n+ * output buffers to write the resulting image data in. It may optionally\n+ * contain an input buffer, in which case the request is for reprocessing that\n+ * input buffer instead of capturing a new image with the camera sensor. The\n+ * capture is identified by the frame_number.\n+ *\n+ * In response, the camera HAL device must send a camera3_capture_result\n+ * structure asynchronously to the framework, using the process_capture_result()\n+ * callback.\n+ */\n+typedef struct camera3_capture_request {\n+    /**\n+     * The frame number is an incrementing integer set by the framework to\n+     * uniquely identify this capture. It needs to be returned in the result\n+     * call, and is also used to identify the request in asynchronous\n+     * notifications sent to camera3_callback_ops_t.notify().\n+     */\n+    uint32_t frame_number;\n+\n+    /**\n+     * The settings buffer contains the capture and processing parameters for\n+     * the request. As a special case, a NULL settings buffer indicates that the\n+     * settings are identical to the most-recently submitted capture request. 
A\n+     * NULL buffer cannot be used as the first submitted request after a\n+     * configure_streams() call.\n+     */\n+    const camera_metadata_t *settings;\n+\n+    /**\n+     * The input stream buffer to use for this request, if any.\n+     *\n+     * If input_buffer is NULL, then the request is for a new capture from the\n+     * imager. If input_buffer is valid, the request is for reprocessing the\n+     * image contained in input_buffer.\n+     *\n+     * In the latter case, the HAL must set the release_fence of the\n+     * input_buffer to a valid sync fence, or to -1 if the HAL does not support\n+     * sync, before process_capture_request() returns.\n+     *\n+     * The HAL is required to wait on the acquire sync fence of the input buffer\n+     * before accessing it.\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     * Any input buffer included here will have been registered with the HAL\n+     * through register_stream_buffers() before its inclusion in a request.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * The buffers will not have been pre-registered with the HAL.\n+     * Subsequent requests may reuse buffers, or provide entirely new buffers.\n+     */\n+    camera3_stream_buffer_t *input_buffer;\n+\n+    /**\n+     * The number of output buffers for this capture request. Must be at least\n+     * 1.\n+     */\n+    uint32_t num_output_buffers;\n+\n+    /**\n+     * An array of num_output_buffers stream buffers, to be filled with image\n+     * data from this capture/reprocess. 
The HAL must wait on the acquire fences\n+     * of each stream buffer before writing to them.\n+     *\n+     * The HAL takes ownership of the actual buffer_handle_t entries in\n+     * output_buffers; the framework does not access them until they are\n+     * returned in a camera3_capture_result_t.\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     * All the buffers included  here will have been registered with the HAL\n+     * through register_stream_buffers() before their inclusion in a request.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * Any or all of the buffers included here may be brand new in this\n+     * request (having never before seen by the HAL).\n+     */\n+    const camera3_stream_buffer_t *output_buffers;\n+\n+} camera3_capture_request_t;\n+\n+/**\n+ * camera3_capture_result_t:\n+ *\n+ * The result of a single capture/reprocess by the camera HAL device. This is\n+ * sent to the framework asynchronously with process_capture_result(), in\n+ * response to a single capture request sent to the HAL with\n+ * process_capture_request(). Multiple process_capture_result() calls may be\n+ * performed by the HAL for each request.\n+ *\n+ * Each call, all with the same frame\n+ * number, may contain some subset of the output buffers, and/or the result\n+ * metadata. The metadata may only be provided once for a given frame number;\n+ * all other calls must set the result metadata to NULL.\n+ *\n+ * The result structure contains the output metadata from this capture, and the\n+ * set of output buffers that have been/will be filled for this capture. Each\n+ * output buffer may come with a release sync fence that the framework will wait\n+ * on before reading, in case the buffer has not yet been filled by the HAL.\n+ *\n+ * >= CAMERA_DEVICE_API_VERSION_3_2:\n+ *\n+ * The metadata may be provided multiple times for a single frame number. 
The\n+ * framework will accumulate together the final result set by combining each\n+ * partial result together into the total result set.\n+ *\n+ * If an input buffer is given in a request, the HAL must return it in one of\n+ * the process_capture_result calls, and the call may be to just return the input\n+ * buffer, without metadata and output buffers; the sync fences must be handled\n+ * the same way they are done for output buffers.\n+ *\n+ *\n+ * Performance considerations:\n+ *\n+ * Applications will also receive these partial results immediately, so sending\n+ * partial results is a highly recommended performance optimization to avoid\n+ * the total pipeline latency before sending the results for what is known very\n+ * early on in the pipeline.\n+ *\n+ * A typical use case might be calculating the AF state halfway through the\n+ * pipeline; by sending the state back to the framework immediately, we get a\n+ * 50% performance increase and perceived responsiveness of the auto-focus.\n+ *\n+ */\n+typedef struct camera3_capture_result {\n+    /**\n+     * The frame number is an incrementing integer set by the framework in the\n+     * submitted request to uniquely identify this capture. It is also used to\n+     * identify the request in asynchronous notifications sent to\n+     * camera3_callback_ops_t.notify().\n+    */\n+    uint32_t frame_number;\n+\n+    /**\n+     * The result metadata for this capture. This contains information about the\n+     * final capture parameters, the state of the capture and post-processing\n+     * hardware, the state of the 3A algorithms, if enabled, and the output of\n+     * any enabled statistics units.\n+     *\n+     * Only one call to process_capture_result() with a given frame_number may\n+     * include the result metadata. 
All other calls for the same frame_number\n+     * must set this to NULL.\n+     *\n+     * If there was an error producing the result metadata, result must be an\n+     * empty metadata buffer, and notify() must be called with ERROR_RESULT.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * Multiple calls to process_capture_result() with a given frame_number\n+     * may include the result metadata.\n+     *\n+     * Partial metadata submitted should not include any metadata key returned\n+     * in a previous partial result for a given frame. Each new partial result\n+     * for that frame must also set a distinct partial_result value.\n+     *\n+     * If notify has been called with ERROR_RESULT, all further partial\n+     * results for that frame are ignored by the framework.\n+     */\n+    const camera_metadata_t *result;\n+\n+    /**\n+     * The number of output buffers returned in this result structure. Must be\n+     * less than or equal to the matching capture request's count. If this is\n+     * less than the buffer count in the capture request, at least one more call\n+     * to process_capture_result with the same frame_number must be made, to\n+     * return the remaining output buffers to the framework. This may only be\n+     * zero if the structure includes valid result metadata or an input buffer\n+     * is returned in this result.\n+     */\n+    uint32_t num_output_buffers;\n+\n+    /**\n+     * The handles for the output stream buffers for this capture. 
They may not\n+     * yet be filled at the time the HAL calls process_capture_result(); the\n+     * framework will wait on the release sync fences provided by the HAL before\n+     * reading the buffers.\n+     *\n+     * The HAL must set the stream buffer's release sync fence to a valid sync\n+     * fd, or to -1 if the buffer has already been filled.\n+     *\n+     * If the HAL encounters an error while processing the buffer, and the\n+     * buffer is not filled, the buffer's status field must be set to\n+     * CAMERA3_BUFFER_STATUS_ERROR. If the HAL did not wait on the acquire fence\n+     * before encountering the error, the acquire fence should be copied into\n+     * the release fence, to allow the framework to wait on the fence before\n+     * reusing the buffer.\n+     *\n+     * The acquire fence must be set to -1 for all output buffers.  If\n+     * num_output_buffers is zero, this may be NULL. In that case, at least one\n+     * more process_capture_result call must be made by the HAL to provide the\n+     * output buffers.\n+     *\n+     * When process_capture_result is called with a new buffer for a frame,\n+     * all previous frames' buffers for that corresponding stream must have been\n+     * already delivered (the fences need not have yet been signaled).\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * Gralloc buffers for a frame may be sent to framework before the\n+     * corresponding SHUTTER-notify.\n+     *\n+     * Performance considerations:\n+     *\n+     * Buffers delivered to the framework will not be dispatched to the\n+     * application layer until a start of exposure timestamp has been received\n+     * via a SHUTTER notify() call. It is highly recommended to\n+     * dispatch that call as early as possible.\n+     */\n+     const camera3_stream_buffer_t *output_buffers;\n+\n+     /**\n+      * >= CAMERA_DEVICE_API_VERSION_3_2:\n+      *\n+      * The handle for the input stream buffer for this capture. 
It may not\n+      * yet be consumed at the time the HAL calls process_capture_result(); the\n+      * framework will wait on the release sync fences provided by the HAL before\n+      * reusing the buffer.\n+      *\n+      * The HAL should handle the sync fences the same way they are done for\n+      * output_buffers.\n+      *\n+      * Only one input buffer is allowed to be sent per request. Similarly to\n+      * output buffers, the ordering of returned input buffers must be\n+      * maintained by the HAL.\n+      *\n+      * Performance considerations:\n+      *\n+      * The input buffer should be returned as early as possible. If the HAL\n+      * supports sync fences, it can call process_capture_result to hand it back\n+      * with sync fences being set appropriately. If the sync fences are not\n+      * supported, the buffer can only be returned when it is consumed, which\n+      * may take long time; the HAL may choose to copy this input buffer to make\n+      * the buffer return sooner.\n+      */\n+      const camera3_stream_buffer_t *input_buffer;\n+\n+     /**\n+      * >= CAMERA_DEVICE_API_VERSION_3_2:\n+      *\n+      * In order to take advantage of partial results, the HAL must set the\n+      * static metadata android.request.partialResultCount to the number of\n+      * partial results it will send for each frame.\n+      *\n+      * Each new capture result with a partial result must set\n+      * this field (partial_result) to a distinct inclusive value between\n+      * 1 and android.request.partialResultCount.\n+      *\n+      * HALs not wishing to take advantage of this feature must not\n+      * set an android.request.partialResultCount or partial_result to a value\n+      * other than 1.\n+      *\n+      * This value must be set to 0 when a capture result contains buffers only\n+      * and no metadata.\n+      */\n+     uint32_t partial_result;\n+\n+} 
camera3_capture_result_t;\n+\n+/**********************************************************************\n+ *\n+ * Callback methods for the HAL to call into the framework.\n+ *\n+ * These methods are used to return metadata and image buffers for a completed\n+ * or failed captures, and to notify the framework of asynchronous events such\n+ * as errors.\n+ *\n+ * The framework will not call back into the HAL from within these callbacks,\n+ * and these calls will not block for extended periods.\n+ *\n+ */\n+typedef struct camera3_callback_ops {\n+\n+    /**\n+     * process_capture_result:\n+     *\n+     * Send results from a completed capture to the framework.\n+     * process_capture_result() may be invoked multiple times by the HAL in\n+     * response to a single capture request. This allows, for example, the\n+     * metadata and low-resolution buffers to be returned in one call, and\n+     * post-processed JPEG buffers in a later call, once it is available. Each\n+     * call must include the frame number of the request it is returning\n+     * metadata or buffers for.\n+     *\n+     * A component (buffer or metadata) of the complete result may only be\n+     * included in one process_capture_result call. A buffer for each stream,\n+     * and the result metadata, must be returned by the HAL for each request in\n+     * one of the process_capture_result calls, even in case of errors producing\n+     * some of the output. A call to process_capture_result() with neither\n+     * output buffers or result metadata is not allowed.\n+     *\n+     * The order of returning metadata and buffers for a single result does not\n+     * matter, but buffers for a given stream must be returned in FIFO order. So\n+     * the buffer for request 5 for stream A must always be returned before the\n+     * buffer for request 6 for stream A. 
This also applies to the result\n+     * metadata; the metadata for request 5 must be returned before the metadata\n+     * for request 6.\n+     *\n+     * However, different streams are independent of each other, so it is\n+     * acceptable and expected that the buffer for request 5 for stream A may be\n+     * returned after the buffer for request 6 for stream B is. And it is\n+     * acceptable that the result metadata for request 6 for stream B is\n+     * returned before the buffer for request 5 for stream A is.\n+     *\n+     * The HAL retains ownership of result structure, which only needs to be\n+     * valid to access during this call. The framework will copy whatever it\n+     * needs before this call returns.\n+     *\n+     * The output buffers do not need to be filled yet; the framework will wait\n+     * on the stream buffer release sync fence before reading the buffer\n+     * data. Therefore, this method should be called by the HAL as soon as\n+     * possible, even if some or all of the output buffers are still in\n+     * being filled. The HAL must include valid release sync fences into each\n+     * output_buffers stream buffer entry, or -1 if that stream buffer is\n+     * already filled.\n+     *\n+     * If the result buffer cannot be constructed for a request, the HAL should\n+     * return an empty metadata buffer, but still provide the output buffers and\n+     * their sync fences. In addition, notify() must be called with an\n+     * ERROR_RESULT message.\n+     *\n+     * If an output buffer cannot be filled, its status field must be set to\n+     * STATUS_ERROR. In addition, notify() must be called with a ERROR_BUFFER\n+     * message.\n+     *\n+     * If the entire capture has failed, then this method still needs to be\n+     * called to return the output buffers to the framework. All the buffer\n+     * statuses should be STATUS_ERROR, and the result metadata should be an\n+     * empty buffer. 
In addition, notify() must be called with a ERROR_REQUEST\n+     * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages\n+     * should not be sent.\n+     *\n+     * Performance requirements:\n+     *\n+     * This is a non-blocking call. The framework will return this call in 5ms.\n+     *\n+     * The pipeline latency (see S7 for definition) should be less than or equal to\n+     * 4 frame intervals, and must be less than or equal to 8 frame intervals.\n+     *\n+     */\n+    void (*process_capture_result)(const struct camera3_callback_ops *,\n+            const camera3_capture_result_t *result);\n+\n+    /**\n+     * notify:\n+     *\n+     * Asynchronous notification callback from the HAL, fired for various\n+     * reasons. Only for information independent of frame capture, or that\n+     * require specific timing. The ownership of the message structure remains\n+     * with the HAL, and the msg only needs to be valid for the duration of this\n+     * call.\n+     *\n+     * Multiple threads may call notify() simultaneously.\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     * The notification for the start of exposure for a given request must be\n+     * sent by the HAL before the first call to process_capture_result() for\n+     * that request is made.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * Buffers delivered to the framework will not be dispatched to the\n+     * application layer until a start of exposure timestamp (or input image's\n+     * start of exposure timestamp for a reprocess request) has been received\n+     * via a SHUTTER notify() call. It is highly recommended to dispatch this\n+     * call as early as possible.\n+     *\n+     * ------------------------------------------------------------------------\n+     * Performance requirements:\n+     *\n+     * This is a non-blocking call. 
The framework will return this call in 5ms.\n+     */\n+    void (*notify)(const struct camera3_callback_ops *,\n+            const camera3_notify_msg_t *msg);\n+\n+} camera3_callback_ops_t;\n+\n+/**********************************************************************\n+ *\n+ * Camera device operations\n+ *\n+ */\n+typedef struct camera3_device_ops {\n+\n+    /**\n+     * initialize:\n+     *\n+     * One-time initialization to pass framework callback function pointers to\n+     * the HAL. Will be called once after a successful open() call, before any\n+     * other functions are called on the camera3_device_ops structure.\n+     *\n+     * Performance requirements:\n+     *\n+     * This should be a non-blocking call. The HAL should return from this call\n+     * in 5ms, and must return from this call in 10ms.\n+     *\n+     * Return values:\n+     *\n+     *  0:     On successful initialization\n+     *\n+     * -ENODEV: If initialization fails. Only close() can be called successfully\n+     *          by the framework after this.\n+     */\n+    int (*initialize)(const struct camera3_device *,\n+            const camera3_callback_ops_t *callback_ops);\n+\n+    /**********************************************************************\n+     * Stream management\n+     */\n+\n+    /**\n+     * configure_streams:\n+     *\n+     * CAMERA_DEVICE_API_VERSION_3_0 only:\n+     *\n+     * Reset the HAL camera device processing pipeline and set up new input and\n+     * output streams. This call replaces any existing stream configuration with\n+     * the streams defined in the stream_list. 
This method will be called at\n+     * least once after initialize() before a request is submitted with\n+     * process_capture_request().\n+     *\n+     * The stream_list must contain at least one output-capable stream, and may\n+     * not contain more than one input-capable stream.\n+     *\n+     * The stream_list may contain streams that are also in the currently-active\n+     * set of streams (from the previous call to configure_stream()). These\n+     * streams will already have valid values for usage, max_buffers, and the\n+     * private pointer.\n+     *\n+     * If such a stream has already had its buffers registered,\n+     * register_stream_buffers() will not be called again for the stream, and\n+     * buffers from the stream can be immediately included in input requests.\n+     *\n+     * If the HAL needs to change the stream configuration for an existing\n+     * stream due to the new configuration, it may rewrite the values of usage\n+     * and/or max_buffers during the configure call.\n+     *\n+     * The framework will detect such a change, and will then reallocate the\n+     * stream buffers, and call register_stream_buffers() again before using\n+     * buffers from that stream in a request.\n+     *\n+     * If a currently-active stream is not included in stream_list, the HAL may\n+     * safely remove any references to that stream. It will not be reused in a\n+     * later configure() call by the framework, and all the gralloc buffers for\n+     * it will be freed after the configure_streams() call returns.\n+     *\n+     * The stream_list structure is owned by the framework, and may not be\n+     * accessed once this call completes. The address of an individual\n+     * camera3_stream_t structure will remain valid for access by the HAL until\n+     * the end of the first configure_stream() call which no longer includes\n+     * that camera3_stream_t in the stream_list argument. 
The HAL may not change\n+     * values in the stream structure outside of the private pointer, except for\n+     * the usage and max_buffers members during the configure_streams() call\n+     * itself.\n+     *\n+     * If the stream is new, the usage, max_buffer, and private pointer fields\n+     * of the stream structure will all be set to 0. The HAL device must set\n+     * these fields before the configure_streams() call returns. These fields\n+     * are then used by the framework and the platform gralloc module to\n+     * allocate the gralloc buffers for each stream.\n+     *\n+     * Before such a new stream can have its buffers included in a capture\n+     * request, the framework will call register_stream_buffers() with that\n+     * stream. However, the framework is not required to register buffers for\n+     * _all_ streams before submitting a request. This allows for quick startup\n+     * of (for example) a preview stream, with allocation for other streams\n+     * happening later or concurrently.\n+     *\n+     * ------------------------------------------------------------------------\n+     * CAMERA_DEVICE_API_VERSION_3_1 only:\n+     *\n+     * Reset the HAL camera device processing pipeline and set up new input and\n+     * output streams. This call replaces any existing stream configuration with\n+     * the streams defined in the stream_list. This method will be called at\n+     * least once after initialize() before a request is submitted with\n+     * process_capture_request().\n+     *\n+     * The stream_list must contain at least one output-capable stream, and may\n+     * not contain more than one input-capable stream.\n+     *\n+     * The stream_list may contain streams that are also in the currently-active\n+     * set of streams (from the previous call to configure_stream()). 
These\n+     * streams will already have valid values for usage, max_buffers, and the\n+     * private pointer.\n+     *\n+     * If such a stream has already had its buffers registered,\n+     * register_stream_buffers() will not be called again for the stream, and\n+     * buffers from the stream can be immediately included in input requests.\n+     *\n+     * If the HAL needs to change the stream configuration for an existing\n+     * stream due to the new configuration, it may rewrite the values of usage\n+     * and/or max_buffers during the configure call.\n+     *\n+     * The framework will detect such a change, and will then reallocate the\n+     * stream buffers, and call register_stream_buffers() again before using\n+     * buffers from that stream in a request.\n+     *\n+     * If a currently-active stream is not included in stream_list, the HAL may\n+     * safely remove any references to that stream. It will not be reused in a\n+     * later configure() call by the framework, and all the gralloc buffers for\n+     * it will be freed after the configure_streams() call returns.\n+     *\n+     * The stream_list structure is owned by the framework, and may not be\n+     * accessed once this call completes. The address of an individual\n+     * camera3_stream_t structure will remain valid for access by the HAL until\n+     * the end of the first configure_stream() call which no longer includes\n+     * that camera3_stream_t in the stream_list argument. The HAL may not change\n+     * values in the stream structure outside of the private pointer, except for\n+     * the usage and max_buffers members during the configure_streams() call\n+     * itself.\n+     *\n+     * If the stream is new, max_buffer, and private pointer fields of the\n+     * stream structure will all be set to 0. The usage will be set to the\n+     * consumer usage flags. The HAL device must set these fields before the\n+     * configure_streams() call returns. 
These fields are then used by the\n+     * framework and the platform gralloc module to allocate the gralloc\n+     * buffers for each stream.\n+     *\n+     * Before such a new stream can have its buffers included in a capture\n+     * request, the framework will call register_stream_buffers() with that\n+     * stream. However, the framework is not required to register buffers for\n+     * _all_ streams before submitting a request. This allows for quick startup\n+     * of (for example) a preview stream, with allocation for other streams\n+     * happening later or concurrently.\n+     *\n+     * ------------------------------------------------------------------------\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * Reset the HAL camera device processing pipeline and set up new input and\n+     * output streams. This call replaces any existing stream configuration with\n+     * the streams defined in the stream_list. This method will be called at\n+     * least once after initialize() before a request is submitted with\n+     * process_capture_request().\n+     *\n+     * The stream_list must contain at least one output-capable stream, and may\n+     * not contain more than one input-capable stream.\n+     *\n+     * The stream_list may contain streams that are also in the currently-active\n+     * set of streams (from the previous call to configure_stream()). 
These\n+     * streams will already have valid values for usage, max_buffers, and the\n+     * private pointer.\n+     *\n+     * If the HAL needs to change the stream configuration for an existing\n+     * stream due to the new configuration, it may rewrite the values of usage\n+     * and/or max_buffers during the configure call.\n+     *\n+     * The framework will detect such a change, and may then reallocate the\n+     * stream buffers before using buffers from that stream in a request.\n+     *\n+     * If a currently-active stream is not included in stream_list, the HAL may\n+     * safely remove any references to that stream. It will not be reused in a\n+     * later configure() call by the framework, and all the gralloc buffers for\n+     * it will be freed after the configure_streams() call returns.\n+     *\n+     * The stream_list structure is owned by the framework, and may not be\n+     * accessed once this call completes. The address of an individual\n+     * camera3_stream_t structure will remain valid for access by the HAL until\n+     * the end of the first configure_stream() call which no longer includes\n+     * that camera3_stream_t in the stream_list argument. The HAL may not change\n+     * values in the stream structure outside of the private pointer, except for\n+     * the usage and max_buffers members during the configure_streams() call\n+     * itself.\n+     *\n+     * If the stream is new, max_buffer, and private pointer fields of the\n+     * stream structure will all be set to 0. The usage will be set to the\n+     * consumer usage flags. The HAL device must set these fields before the\n+     * configure_streams() call returns. These fields are then used by the\n+     * framework and the platform gralloc module to allocate the gralloc\n+     * buffers for each stream.\n+     *\n+     * Newly allocated buffers may be included in a capture request at any time\n+     * by the framework. 
Once a gralloc buffer is returned to the framework\n+     * with process_capture_result (and its respective release_fence has been\n+     * signaled) the framework may free or reuse it at any time.\n+     *\n+     * ------------------------------------------------------------------------\n+     *\n+     * Preconditions:\n+     *\n+     * The framework will only call this method when no captures are being\n+     * processed. That is, all results have been returned to the framework, and\n+     * all in-flight input and output buffers have been returned and their\n+     * release sync fences have been signaled by the HAL. The framework will not\n+     * submit new requests for capture while the configure_streams() call is\n+     * underway.\n+     *\n+     * Postconditions:\n+     *\n+     * The HAL device must configure itself to provide maximum possible output\n+     * frame rate given the sizes and formats of the output streams, as\n+     * documented in the camera device's static metadata.\n+     *\n+     * Performance requirements:\n+     *\n+     * This call is expected to be heavyweight and possibly take several hundred\n+     * milliseconds to complete, since it may require resetting and\n+     * reconfiguring the image sensor and the camera processing pipeline.\n+     * Nevertheless, the HAL device should attempt to minimize the\n+     * reconfiguration delay to minimize the user-visible pauses during\n+     * application operational mode changes (such as switching from still\n+     * capture to video recording).\n+     *\n+     * The HAL should return from this call in 500ms, and must return from this\n+     * call in 1000ms.\n+     *\n+     * Return values:\n+     *\n+     *  0:      On successful stream configuration\n+     *\n+     * -EINVAL: If the requested stream configuration is invalid. 
Some examples\n+     *          of invalid stream configurations include:\n+     *\n+     *          - Including more than 1 input-capable stream (INPUT or\n+     *            BIDIRECTIONAL)\n+     *\n+     *          - Not including any output-capable streams (OUTPUT or\n+     *            BIDIRECTIONAL)\n+     *\n+     *          - Including streams with unsupported formats, or an unsupported\n+     *            size for that format.\n+     *\n+     *          - Including too many output streams of a certain format.\n+     *\n+     *          - Unsupported rotation configuration (only applies to\n+     *            devices with version >= CAMERA_DEVICE_API_VERSION_3_3)\n+     *\n+     *          - Stream sizes/formats don't satisfy the\n+     *            camera3_stream_configuration_t->operation_mode requirements for non-NORMAL mode,\n+     *            or the requested operation_mode is not supported by the HAL.\n+     *            (only applies to devices with version >= CAMERA_DEVICE_API_VERSION_3_3)\n+     *\n+     *          Note that the framework submitting an invalid stream\n+     *          configuration is not normal operation, since stream\n+     *          configurations are checked before configure. An invalid\n+     *          configuration means that a bug exists in the framework code, or\n+     *          there is a mismatch between the HAL's static metadata and the\n+     *          requirements on streams.\n+     *\n+     * -ENODEV: If there has been a fatal error and the device is no longer\n+     *          operational. Only close() can be called successfully by the\n+     *          framework after this error is returned.\n+     */\n+    int (*configure_streams)(const struct camera3_device *,\n+            camera3_stream_configuration_t *stream_list);\n+\n+    /**\n+     * register_stream_buffers:\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * DEPRECATED. 
This will not be called and must be set to NULL.\n+     *\n+     * <= CAMERA_DEVICE_API_VERSION_3_1:\n+     *\n+     * Register buffers for a given stream with the HAL device. This method is\n+     * called by the framework after a new stream is defined by\n+     * configure_streams, and before buffers from that stream are included in a\n+     * capture request. If the same stream is listed in a subsequent\n+     * configure_streams() call, register_stream_buffers will _not_ be called\n+     * again for that stream.\n+     *\n+     * The framework does not need to register buffers for all configured\n+     * streams before it submits the first capture request. This allows quick\n+     * startup for preview (or similar use cases) while other streams are still\n+     * being allocated.\n+     *\n+     * This method is intended to allow the HAL device to map or otherwise\n+     * prepare the buffers for later use. The buffers passed in will already be\n+     * locked for use. At the end of the call, all the buffers must be ready to\n+     * be returned to the stream.  The buffer_set argument is only valid for the\n+     * duration of this call.\n+     *\n+     * If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,\n+     * the camera HAL should inspect the passed-in buffers here to determine any\n+     * platform-private pixel format information.\n+     *\n+     * Performance requirements:\n+     *\n+     * This should be a non-blocking call. The HAL should return from this call\n+     * in 1ms, and must return from this call in 5ms.\n+     *\n+     * Return values:\n+     *\n+     *  0:      On successful registration of the new stream buffers\n+     *\n+     * -EINVAL: If the stream_buffer_set does not refer to a valid active\n+     *          stream, or if the buffers array is invalid.\n+     *\n+     * -ENOMEM: If there was a failure in registering the buffers. 
The framework\n+     *          must consider all the stream buffers to be unregistered, and can\n+     *          try to register again later.\n+     *\n+     * -ENODEV: If there is a fatal error, and the device is no longer\n+     *          operational. Only close() can be called successfully by the\n+     *          framework after this error is returned.\n+     */\n+    int (*register_stream_buffers)(const struct camera3_device *,\n+            const camera3_stream_buffer_set_t *buffer_set);\n+\n+    /**********************************************************************\n+     * Request creation and submission\n+     */\n+\n+    /**\n+     * construct_default_request_settings:\n+     *\n+     * Create capture settings for standard camera use cases.\n+     *\n+     * The device must return a settings buffer that is configured to meet the\n+     * requested use case, which must be one of the CAMERA3_TEMPLATE_*\n+     * enums. All request control fields must be included.\n+     *\n+     * The HAL retains ownership of this structure, but the pointer to the\n+     * structure must be valid until the device is closed. The framework and the\n+     * HAL may not modify the buffer once it is returned by this call. The same\n+     * buffer may be returned for subsequent calls for the same template, or for\n+     * other templates.\n+     *\n+     * Performance requirements:\n+     *\n+     * This should be a non-blocking call. The HAL should return from this call\n+     * in 1ms, and must return from this call in 5ms.\n+     *\n+     * Return values:\n+     *\n+     *   Valid metadata: On successful creation of a default settings\n+     *                   buffer.\n+     *\n+     *   NULL:           In case of a fatal error. 
After this is returned, only\n+     *                   the close() method can be called successfully by the\n+     *                   framework.\n+     */\n+    const camera_metadata_t* (*construct_default_request_settings)(\n+            const struct camera3_device *,\n+            int type);\n+\n+    /**\n+     * process_capture_request:\n+     *\n+     * Send a new capture request to the HAL. The HAL should not return from\n+     * this call until it is ready to accept the next request to process. Only\n+     * one call to process_capture_request() will be made at a time by the\n+     * framework, and the calls will all be from the same thread. The next call\n+     * to process_capture_request() will be made as soon as a new request and\n+     * its associated buffers are available. In a normal preview scenario, this\n+     * means the function will be called again by the framework almost\n+     * instantly.\n+     *\n+     * The actual request processing is asynchronous, with the results of\n+     * capture being returned by the HAL through the process_capture_result()\n+     * call. This call requires the result metadata to be available, but output\n+     * buffers may simply provide sync fences to wait on. Multiple requests are\n+     * expected to be in flight at once, to maintain full output frame rate.\n+     *\n+     * The framework retains ownership of the request structure. It is only\n+     * guaranteed to be valid during this call. The HAL device must make copies\n+     * of the information it needs to retain for the capture processing. The HAL\n+     * is responsible for waiting on and closing the buffers' fences and\n+     * returning the buffer handles to the framework.\n+     *\n+     * The HAL must write the file descriptor for the input buffer's release\n+     * sync fence into input_buffer->release_fence, if input_buffer is not\n+     * NULL. 
If the HAL returns -1 for the input buffer release sync fence, the\n+     * framework is free to immediately reuse the input buffer. Otherwise, the\n+     * framework will wait on the sync fence before refilling and reusing the\n+     * input buffer.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *\n+     * The input/output buffers provided by the framework in each request\n+     * may be brand new (having never before seen by the HAL).\n+     *\n+     * ------------------------------------------------------------------------\n+     * Performance considerations:\n+     *\n+     * Handling a new buffer should be extremely lightweight and there should be\n+     * no frame rate degradation or frame jitter introduced.\n+     *\n+     * This call must return fast enough to ensure that the requested frame\n+     * rate can be sustained, especially for streaming cases (post-processing\n+     * quality settings set to FAST). The HAL should return this call in 1\n+     * frame interval, and must return from this call in 4 frame intervals.\n+     *\n+     * Return values:\n+     *\n+     *  0:      On a successful start to processing the capture request\n+     *\n+     * -EINVAL: If the input is malformed (the settings are NULL when not\n+     *          allowed, there are 0 output buffers, etc) and capture processing\n+     *          cannot start. Failures during request processing should be\n+     *          handled by calling camera3_callback_ops_t.notify(). In case of\n+     *          this error, the framework will retain responsibility for the\n+     *          stream buffers' fences and the buffer handles; the HAL should\n+     *          not close the fences or return these buffers with\n+     *          process_capture_result.\n+     *\n+     * -ENODEV: If the camera device has encountered a serious error. 
After this\n+     *          error is returned, only the close() method can be successfully\n+     *          called by the framework.\n+     *\n+     */\n+    int (*process_capture_request)(const struct camera3_device *,\n+            camera3_capture_request_t *request);\n+\n+    /**********************************************************************\n+     * Miscellaneous methods\n+     */\n+\n+    /**\n+     * get_metadata_vendor_tag_ops:\n+     *\n+     * Get methods to query for vendor extension metadata tag information. The\n+     * HAL should fill in all the vendor tag operation methods, or leave ops\n+     * unchanged if no vendor tags are defined.\n+     *\n+     * The definition of vendor_tag_query_ops_t can be found in\n+     * system/media/camera/include/system/camera_metadata.h.\n+     *\n+     * >= CAMERA_DEVICE_API_VERSION_3_2:\n+     *    DEPRECATED. This function has been deprecated and should be set to\n+     *    NULL by the HAL.  Please implement get_vendor_tag_ops in camera_common.h\n+     *    instead.\n+     */\n+    void (*get_metadata_vendor_tag_ops)(const struct camera3_device*,\n+            vendor_tag_query_ops_t* ops);\n+\n+    /**\n+     * dump:\n+     *\n+     * Print out debugging state for the camera device. This will be called by\n+     * the framework when the camera service is asked for a debug dump, which\n+     * happens when using the dumpsys tool, or when capturing a bugreport.\n+     *\n+     * The passed-in file descriptor can be used to write debugging text using\n+     * dprintf() or write(). The text should be in ASCII encoding only.\n+     *\n+     * Performance requirements:\n+     *\n+     * This must be a non-blocking call. The HAL should return from this call\n+     * in 1ms, must return from this call in 10ms. 
This call must avoid\n+     * deadlocks, as it may be called at any point during camera operation.\n+     * Any synchronization primitives used (such as mutex locks or semaphores)\n+     * should be acquired with a timeout.\n+     */\n+    void (*dump)(const struct camera3_device *, int fd);\n+\n+    /**\n+     * flush:\n+     *\n+     * Flush all currently in-process captures and all buffers in the pipeline\n+     * on the given device. The framework will use this to dump all state as\n+     * quickly as possible in order to prepare for a configure_streams() call.\n+     *\n+     * No buffers are required to be successfully returned, so every buffer\n+     * held at the time of flush() (whether successfully filled or not) may be\n+     * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed\n+     * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call,\n+     * provided they are successfully filled.\n+     *\n+     * All requests currently in the HAL are expected to be returned as soon as\n+     * possible.  Not-in-process requests should return errors immediately. Any\n+     * interruptible hardware blocks should be stopped, and any uninterruptible\n+     * blocks should be waited on.\n+     *\n+     * flush() may be called concurrently to process_capture_request(), with the expectation that\n+     * process_capture_request will return quickly and the request submitted in that\n+     * process_capture_request call is treated like all other in-flight requests.  Due to\n+     * concurrency issues, it is possible that from the HAL's point of view, a\n+     * process_capture_request() call may be started after flush has been invoked but has not\n+     * returned yet. If such a call happens before flush() returns, the HAL should treat the new\n+     * capture request like other in-flight pending requests (see #4 below).\n+     *\n+     * More specifically, the HAL must follow below requirements for various cases:\n+     *\n+     * 1. 
For captures that are too late for the HAL to cancel/stop, and will be\n+     *    completed normally by the HAL; i.e. the HAL can send shutter/notify and\n+     *    process_capture_result and buffers as normal.\n+     *\n+     * 2. For pending requests that have not done any processing, the HAL must call notify\n+     *    CAMERA3_MSG_ERROR_REQUEST, and return all the output buffers with\n+     *    process_capture_result in the error state (CAMERA3_BUFFER_STATUS_ERROR).\n+     *    The HAL must not place the release fence into an error state, instead,\n+     *    the release fences must be set to the acquire fences passed by the framework,\n+     *    or -1 if they have been waited on by the HAL already. This is also the path\n+     *    to follow for any captures for which the HAL already called notify() with\n+     *    CAMERA3_MSG_SHUTTER but won't be producing any metadata/valid buffers for.\n+     *    After CAMERA3_MSG_ERROR_REQUEST, for a given frame, only process_capture_results with\n+     *    buffers in CAMERA3_BUFFER_STATUS_ERROR are allowed. No further notifys or\n+     *    process_capture_result with non-null metadata is allowed.\n+     *\n+     * 3. For partially completed pending requests that will not have all the output\n+     *    buffers or perhaps missing metadata, the HAL should follow below:\n+     *\n+     *    3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected result\n+     *    metadata (i.e. one or more partial metadata) won't be available for the capture.\n+     *\n+     *    3.2. 
Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that won't\n+     *         be produced for the capture.\n+     *\n+     *    3.3  Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp before\n+     *         any buffers/metadata are returned with process_capture_result.\n+     *\n+     *    3.4 For captures that will produce some results, the HAL must not call\n+     *        CAMERA3_MSG_ERROR_REQUEST, since that indicates complete failure.\n+     *\n+     *    3.5. Valid buffers/metadata should be passed to the framework as normal.\n+     *\n+     *    3.6. Failed buffers should be returned to the framework as described for case 2.\n+     *         But failed buffers do not have to follow the strict ordering valid buffers do,\n+     *         and may be out-of-order with respect to valid buffers. For example, if buffers\n+     *         A, B, C, D, E are sent, D and E are failed, then A, E, B, D, C is an acceptable\n+     *         return order.\n+     *\n+     *    3.7. For fully-missing metadata, calling CAMERA3_MSG_ERROR_RESULT is sufficient, no\n+     *         need to call process_capture_result with NULL metadata or equivalent.\n+     *\n+     * 4. If a flush() is invoked while a process_capture_request() invocation is active, that\n+     *    process call should return as soon as possible. In addition, if a process_capture_request()\n+     *    call is made after flush() has been invoked but before flush() has returned, the\n+     *    capture request provided by the late process_capture_request call should be treated like\n+     *    a pending request in case #2 above.\n+     *\n+     * flush() should only return when there are no more outstanding buffers or\n+     * requests left in the HAL. 
The framework may call configure_streams (as\n+     * the HAL state is now quiesced) or may issue new requests.\n+     *\n+     * Note that it's sufficient to only support fully-succeeded and fully-failed result cases.\n+     * However, it is highly desirable to support the partial failure cases as well, as it\n+     * could help improve the flush call overall performance.\n+     *\n+     * Performance requirements:\n+     *\n+     * The HAL should return from this call in 100ms, and must return from this\n+     * call in 1000ms. And this call must not be blocked longer than pipeline\n+     * latency (see S7 for definition).\n+     *\n+     * Version information:\n+     *\n+     *   only available if device version >= CAMERA_DEVICE_API_VERSION_3_1.\n+     *\n+     * Return values:\n+     *\n+     *  0:      On a successful flush of the camera HAL.\n+     *\n+     * -EINVAL: If the input is malformed (the device is not valid).\n+     *\n+     * -ENODEV: If the camera device has encountered a serious error. 
After this\n+     *          error is returned, only the close() method can be successfully\n+     *          called by the framework.\n+     */\n+    int (*flush)(const struct camera3_device *);\n+\n+    /* reserved for future use */\n+    void *reserved[8];\n+} camera3_device_ops_t;\n+\n+/**********************************************************************\n+ *\n+ * Camera device definition\n+ *\n+ */\n+typedef struct camera3_device {\n+    /**\n+     * common.version must equal CAMERA_DEVICE_API_VERSION_3_0 to identify this\n+     * device as implementing version 3.0 of the camera device HAL.\n+     *\n+     * Performance requirements:\n+     *\n+     * Camera open (common.module->common.methods->open) should return in 200ms, and must return\n+     * in 500ms.\n+     * Camera close (common.close) should return in 200ms, and must return in 500ms.\n+     *\n+     */\n+    hw_device_t common;\n+    camera3_device_ops_t *ops;\n+    void *priv;\n+} camera3_device_t;\n+\n+__END_DECLS\n+\n+#endif /* #ifdef ANDROID_INCLUDE_CAMERA3_H */\ndiff --git a/include/android/hardware/libhardware/include/hardware/camera_common.h b/include/android/hardware/libhardware/include/hardware/camera_common.h\nnew file mode 100644\nindex 000000000000..7658dd4062f3\n--- /dev/null\n+++ b/include/android/hardware/libhardware/include/hardware/camera_common.h\n@@ -0,0 +1,916 @@\n+/*\n+ * Copyright (C) 2012 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * 
limitations under the License.\n+ */\n+\n+// FIXME: add well-defined names for cameras\n+\n+#ifndef ANDROID_INCLUDE_CAMERA_COMMON_H\n+#define ANDROID_INCLUDE_CAMERA_COMMON_H\n+\n+#include <stdint.h>\n+#include <stdbool.h>\n+#include <sys/cdefs.h>\n+#include <sys/types.h>\n+#include <cutils/native_handle.h>\n+#include <system/camera.h>\n+#include <system/camera_vendor_tags.h>\n+#include <hardware/hardware.h>\n+#include <hardware/gralloc.h>\n+\n+__BEGIN_DECLS\n+\n+/**\n+ * The id of this module\n+ */\n+#define CAMERA_HARDWARE_MODULE_ID \"camera\"\n+\n+/**\n+ * Module versioning information for the Camera hardware module, based on\n+ * camera_module_t.common.module_api_version. The two most significant hex\n+ * digits represent the major version, and the two least significant represent\n+ * the minor version.\n+ *\n+ *******************************************************************************\n+ * Versions: 0.X - 1.X [CAMERA_MODULE_API_VERSION_1_0]\n+ *\n+ *   Camera modules that report these version numbers implement the initial\n+ *   camera module HAL interface. All camera devices openable through this\n+ *   module support only version 1 of the camera device HAL. The device_version\n+ *   and static_camera_characteristics fields of camera_info are not valid. Only\n+ *   the android.hardware.Camera API can be supported by this module and its\n+ *   devices.\n+ *\n+ *******************************************************************************\n+ * Version: 2.0 [CAMERA_MODULE_API_VERSION_2_0]\n+ *\n+ *   Camera modules that report this version number implement the second version\n+ *   of the camera module HAL interface. Camera devices openable through this\n+ *   module may support either version 1.0 or version 2.0 of the camera device\n+ *   HAL interface. 
The device_version field of camera_info is always valid; the\n+ *   static_camera_characteristics field of camera_info is valid if the\n+ *   device_version field is 2.0 or higher.\n+ *\n+ *******************************************************************************\n+ * Version: 2.1 [CAMERA_MODULE_API_VERSION_2_1]\n+ *\n+ *   This camera module version adds support for asynchronous callbacks to the\n+ *   framework from the camera HAL module, which is used to notify the framework\n+ *   about changes to the camera module state. Modules that provide a valid\n+ *   set_callbacks() method must report at least this version number.\n+ *\n+ *******************************************************************************\n+ * Version: 2.2 [CAMERA_MODULE_API_VERSION_2_2]\n+ *\n+ *   This camera module version adds vendor tag support from the module, and\n+ *   deprecates the old vendor_tag_query_ops that were previously only\n+ *   accessible with a device open.\n+ *\n+ *******************************************************************************\n+ * Version: 2.3 [CAMERA_MODULE_API_VERSION_2_3]\n+ *\n+ *   This camera module version adds open legacy camera HAL device support.\n+ *   Framework can use it to open the camera device as lower device HAL version\n+ *   HAL device if the same device can support multiple device API versions.\n+ *   The standard hardware module open call (common.methods->open) continues\n+ *   to open the camera device with the latest supported version, which is\n+ *   also the version listed in camera_info_t.device_version.\n+ *\n+ *******************************************************************************\n+ * Version: 2.4 [CAMERA_MODULE_API_VERSION_2_4]\n+ *\n+ * This camera module version adds below API changes:\n+ *\n+ * 1. Torch mode support. The framework can use it to turn on torch mode for\n+ *    any camera device that has a flash unit, without opening a camera device. 
The\n+ *    camera device has a higher priority accessing the flash unit than the camera\n+ *    module; opening a camera device will turn off the torch if it had been enabled\n+ *    through the module interface. When there are any resource conflicts, such as\n+ *    open() is called to open a camera device, the camera HAL module must notify the\n+ *    framework through the torch mode status callback that the torch mode has been\n+ *    turned off.\n+ *\n+ * 2. External camera (e.g. USB hot-plug camera) support. The API updates specify that\n+ *    the camera static info is only available when camera is connected and ready to\n+ *    use for external hot-plug cameras. Calls to get static info will be invalid\n+ *    calls when camera status is not CAMERA_DEVICE_STATUS_PRESENT. The frameworks\n+ *    will only count on device status change callbacks to manage the available external\n+ *    camera list.\n+ *\n+ * 3. Camera arbitration hints. This module version adds support for explicitly\n+ *    indicating the number of camera devices that can be simultaneously opened and used.\n+ *    To specify valid combinations of devices, the resource_cost and conflicting_devices\n+ *    fields should always be set in the camera_info structure returned by the\n+ *    get_camera_info call.\n+ *\n+ * 4. Module initialization method. This will be called by the camera service\n+ *    right after the HAL module is loaded, to allow for one-time initialization\n+ *    of the HAL. 
It is called before any other module methods are invoked.\n+ */\n+\n+/**\n+ * Predefined macros for currently-defined version numbers\n+ */\n+\n+/**\n+ * All module versions <= HARDWARE_MODULE_API_VERSION(1, 0xFF) must be treated\n+ * as CAMERA_MODULE_API_VERSION_1_0\n+ */\n+#define CAMERA_MODULE_API_VERSION_1_0 HARDWARE_MODULE_API_VERSION(1, 0)\n+#define CAMERA_MODULE_API_VERSION_2_0 HARDWARE_MODULE_API_VERSION(2, 0)\n+#define CAMERA_MODULE_API_VERSION_2_1 HARDWARE_MODULE_API_VERSION(2, 1)\n+#define CAMERA_MODULE_API_VERSION_2_2 HARDWARE_MODULE_API_VERSION(2, 2)\n+#define CAMERA_MODULE_API_VERSION_2_3 HARDWARE_MODULE_API_VERSION(2, 3)\n+#define CAMERA_MODULE_API_VERSION_2_4 HARDWARE_MODULE_API_VERSION(2, 4)\n+\n+#define CAMERA_MODULE_API_VERSION_CURRENT CAMERA_MODULE_API_VERSION_2_4\n+\n+/**\n+ * All device versions <= HARDWARE_DEVICE_API_VERSION(1, 0xFF) must be treated\n+ * as CAMERA_DEVICE_API_VERSION_1_0\n+ */\n+#define CAMERA_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0)\n+#define CAMERA_DEVICE_API_VERSION_2_0 HARDWARE_DEVICE_API_VERSION(2, 0)\n+#define CAMERA_DEVICE_API_VERSION_2_1 HARDWARE_DEVICE_API_VERSION(2, 1)\n+#define CAMERA_DEVICE_API_VERSION_3_0 HARDWARE_DEVICE_API_VERSION(3, 0)\n+#define CAMERA_DEVICE_API_VERSION_3_1 HARDWARE_DEVICE_API_VERSION(3, 1)\n+#define CAMERA_DEVICE_API_VERSION_3_2 HARDWARE_DEVICE_API_VERSION(3, 2)\n+#define CAMERA_DEVICE_API_VERSION_3_3 HARDWARE_DEVICE_API_VERSION(3, 3)\n+\n+// Device version 3.3 is current, older HAL camera device versions are not\n+// recommended for new devices.\n+#define CAMERA_DEVICE_API_VERSION_CURRENT CAMERA_DEVICE_API_VERSION_3_3\n+\n+/**\n+ * Defined in /system/media/camera/include/system/camera_metadata.h\n+ */\n+typedef struct camera_metadata camera_metadata_t;\n+\n+typedef struct camera_info {\n+    /**\n+     * The direction that the camera faces to. 
See system/core/include/system/camera.h\n+     * for camera facing definitions.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *   It should be CAMERA_FACING_BACK or CAMERA_FACING_FRONT.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *   It should be CAMERA_FACING_BACK, CAMERA_FACING_FRONT or\n+     *   CAMERA_FACING_EXTERNAL.\n+     */\n+    int facing;\n+\n+    /**\n+     * The orientation of the camera image. The value is the angle that the\n+     * camera image needs to be rotated clockwise so it shows correctly on the\n+     * display in its natural orientation. It should be 0, 90, 180, or 270.\n+     *\n+     * For example, suppose a device has a naturally tall screen. The\n+     * back-facing camera sensor is mounted in landscape. You are looking at the\n+     * screen. If the top side of the camera sensor is aligned with the right\n+     * edge of the screen in natural orientation, the value should be 90. If the\n+     * top side of a front-facing camera sensor is aligned with the right of the\n+     * screen, the value should be 270.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *   Valid in all camera_module versions.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *   Valid if camera facing is CAMERA_FACING_BACK or CAMERA_FACING_FRONT,\n+     *   not valid if camera facing is CAMERA_FACING_EXTERNAL.\n+     */\n+    int orientation;\n+\n+    /**\n+     * The value of camera_device_t.common.version.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_1_0:\n+     *\n+     *    Not valid. Can be assumed to be CAMERA_DEVICE_API_VERSION_1_0. 
Do\n+     *    not read this field.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_0 or higher:\n+     *\n+     *    Always valid\n+     *\n+     */\n+    uint32_t device_version;\n+\n+    /**\n+     * The camera's fixed characteristics, which include all static camera metadata\n+     * specified in system/media/camera/docs/docs.html. This should be a sorted metadata\n+     * buffer, and may not be modified or freed by the caller. The pointer should remain\n+     * valid for the lifetime of the camera module, and values in it may not\n+     * change after it is returned by get_camera_info().\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_1_0:\n+     *\n+     *    Not valid. Extra characteristics are not available. Do not read this\n+     *    field.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_0 or higher:\n+     *\n+     *    Valid if device_version >= CAMERA_DEVICE_API_VERSION_2_0. Do not read\n+     *    otherwise.\n+     *\n+     */\n+    const camera_metadata_t *static_camera_characteristics;\n+\n+    /**\n+     * The total resource \"cost\" of using this camera, represented as an integer\n+     * value in the range [0, 100] where 100 represents total usage of the shared\n+     * resource that is the limiting bottleneck of the camera subsystem.  This may\n+     * be a very rough estimate, and is used as a hint to the camera service to\n+     * determine when to disallow multiple applications from simultaneously\n+     * opening different cameras advertised by the camera service.\n+     *\n+     * The camera service must be able to simultaneously open and use any\n+     * combination of camera devices exposed by the HAL where the sum of\n+     * the resource costs of these cameras is <= 100.  
For determining cost,\n+     * each camera device must be assumed to be configured and operating at\n+     * the maximally resource-consuming framerate and stream size settings\n+     * available in the configuration settings exposed for that device through\n+     * the camera metadata.\n+     *\n+     * The camera service may still attempt to simultaneously open combinations\n+     * of camera devices with a total resource cost > 100.  This may succeed or\n+     * fail.  If this succeeds, combinations of configurations that are not\n+     * supported due to resource constraints from having multiple open devices\n+     * should fail during the configure calls.  If the total resource cost is\n+     * <= 100, open and configure should never fail for any stream configuration\n+     * settings or other device capabilities that would normally succeed for a\n+     * device when it is the only open camera device.\n+     *\n+     * This field will be used to determine whether background applications are\n+     * allowed to use this camera device while other applications are using other\n+     * camera devices.  Note: multiple applications will never be allowed by the\n+     * camera service to simultaneously open the same camera device.\n+     *\n+     * Example use cases:\n+     *\n+     * Ex. 
1: Camera Device 0 = Back Camera\n+     *        Camera Device 1 = Front Camera\n+     *   - Using both camera devices causes a large framerate slowdown due to\n+     *     limited ISP bandwidth.\n+     *\n+     *   Configuration:\n+     *\n+     *   Camera Device 0 - resource_cost = 51\n+     *                     conflicting_devices = null\n+     *   Camera Device 1 - resource_cost = 51\n+     *                     conflicting_devices = null\n+     *\n+     *   Result:\n+     *\n+     *   Since the sum of the resource costs is > 100, if a higher-priority\n+     *   application has either device open, no lower-priority applications will be\n+     *   allowed by the camera service to open either device.  If a lower-priority\n+     *   application is using a device that a higher-priority subsequently attempts\n+     *   to open, the lower-priority application will be forced to disconnect the\n+     *   the device.\n+     *\n+     *   If the highest-priority application chooses, it may still attempt to open\n+     *   both devices (since these devices are not listed as conflicting in the\n+     *   conflicting_devices fields), but usage of these devices may fail in the\n+     *   open or configure calls.\n+     *\n+     * Ex. 2: Camera Device 0 = Left Back Camera\n+     *        Camera Device 1 = Right Back Camera\n+     *        Camera Device 2 = Combined stereo camera using both right and left\n+     *                          back camera sensors used by devices 0, and 1\n+     *        Camera Device 3 = Front Camera\n+     *   - Due to do hardware constraints, up to two cameras may be open at once. 
The\n+     *     combined stereo camera may never be used at the same time as either of the\n+     *     two back camera devices (device 0, 1), and typically requires too much\n+     *     bandwidth to use at the same time as the front camera (device 3).\n+     *\n+     *   Configuration:\n+     *\n+     *   Camera Device 0 - resource_cost = 50\n+     *                     conflicting_devices = { 2 }\n+     *   Camera Device 1 - resource_cost = 50\n+     *                     conflicting_devices = { 2 }\n+     *   Camera Device 2 - resource_cost = 100\n+     *                     conflicting_devices = { 0, 1 }\n+     *   Camera Device 3 - resource_cost = 50\n+     *                     conflicting_devices = null\n+     *\n+     *   Result:\n+     *\n+     *   Based on the conflicting_devices fields, the camera service guarantees that\n+     *   the following sets of open devices will never be allowed: { 1, 2 }, { 0, 2 }.\n+     *\n+     *   Based on the resource_cost fields, if a high-priority foreground application\n+     *   is using camera device 0, a background application would be allowed to open\n+     *   camera device 1 or 3 (but would be forced to disconnect it again if the\n+     *   foreground application opened another device).\n+     *\n+     *   The highest priority application may still attempt to simultaneously open\n+     *   devices 0, 2, and 3, but the HAL may fail in open or configure calls for\n+     *   this combination.\n+     *\n+     * Ex. 3: Camera Device 0 = Back Camera\n+     *        Camera Device 1 = Front Camera\n+     *        Camera Device 2 = Low-power Front Camera that uses the same\n+     *                          sensor as device 1, but only exposes image stream\n+     *                          resolutions that can be used in low-power mode\n+     *  - Using both front cameras (device 1, 2) at the same time is impossible due\n+     *    a shared physical sensor.  
Using the back and \"high-power\" front camera\n+     *    (device 1) may be impossible for some stream configurations due to hardware\n+     *    limitations, but the \"low-power\" front camera option may always be used as\n+     *    it has special dedicated hardware.\n+     *\n+     *   Configuration:\n+     *\n+     *   Camera Device 0 - resource_cost = 100\n+     *                     conflicting_devices = null\n+     *   Camera Device 1 - resource_cost = 100\n+     *                     conflicting_devices = { 2 }\n+     *   Camera Device 2 - resource_cost = 0\n+     *                     conflicting_devices = { 1 }\n+     *   Result:\n+     *\n+     *   Based on the conflicting_devices fields, the camera service guarantees that\n+     *   the following sets of open devices will never be allowed: { 1, 2 }.\n+     *\n+     *   Based on the resource_cost fields, only the highest priority application\n+     *   may attempt to open both device 0 and 1 at the same time. If a higher-priority\n+     *   application is not using device 1 or 2, a low-priority background application\n+     *   may open device 2 (but will be forced to disconnect it if a higher-priority\n+     *   application subsequently opens device 1 or 2).\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *    Not valid.  Can be assumed to be 100.  
Do not read this field.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *    Always valid.\n+     */\n+    int resource_cost;\n+\n+    /**\n+     * An array of camera device IDs represented as NULL-terminated strings\n+     * indicating other devices that cannot be simultaneously opened while this\n+     * camera device is in use.\n+     *\n+     * This field is intended to be used to indicate that this camera device\n+     * is a composite of several other camera devices, or otherwise has\n+     * hardware dependencies that prohibit simultaneous usage. If there are no\n+     * dependencies, a NULL may be returned in this field to indicate this.\n+     *\n+     * The camera service will never simultaneously open any of the devices\n+     * in this list while this camera device is open.\n+     *\n+     * The strings pointed to in this field will not be cleaned up by the camera\n+     * service, and must remain while this device is plugged in.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *    Not valid.  Can be assumed to be NULL.  Do not read this field.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *    Always valid.\n+     */\n+    char** conflicting_devices;\n+\n+    /**\n+     * The length of the array given in the conflicting_devices field.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *    Not valid.  Can be assumed to be 0.  
Do not read this field.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *    Always valid.\n+     */\n+    size_t conflicting_devices_length;\n+\n+} camera_info_t;\n+\n+/**\n+ * camera_device_status_t:\n+ *\n+ * The current status of the camera device, as provided by the HAL through the\n+ * camera_module_callbacks.camera_device_status_change() call.\n+ *\n+ * At module load time, the framework will assume all camera devices are in the\n+ * CAMERA_DEVICE_STATUS_PRESENT state. The HAL should invoke\n+ * camera_module_callbacks::camera_device_status_change to inform the framework\n+ * of any initially NOT_PRESENT devices.\n+ *\n+ * Allowed transitions:\n+ *      PRESENT            -> NOT_PRESENT\n+ *      NOT_PRESENT        -> ENUMERATING\n+ *      NOT_PRESENT        -> PRESENT\n+ *      ENUMERATING        -> PRESENT\n+ *      ENUMERATING        -> NOT_PRESENT\n+ */\n+typedef enum camera_device_status {\n+    /**\n+     * The camera device is not currently connected, and opening it will return\n+     * failure.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *   Calls to get_camera_info must still succeed, and provide the same information\n+     *   it would if the camera were connected.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4:\n+     *\n+     *   The camera device at this status must return -EINVAL for get_camera_info call,\n+     *   as the device is not connected.\n+     */\n+    CAMERA_DEVICE_STATUS_NOT_PRESENT = 0,\n+\n+    /**\n+     * The camera device is connected, and opening it will succeed.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *   The information returned by get_camera_info cannot change due to this status\n+     *   change. 
By default, the framework will assume all devices are in this state.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4:\n+     *\n+     *   The information returned by get_camera_info will become valid after a device's\n+     *   status changes to this. By default, the framework will assume all devices are in\n+     *   this state.\n+     */\n+    CAMERA_DEVICE_STATUS_PRESENT = 1,\n+\n+    /**\n+     * The camera device is connected, but it is undergoing an enumeration and\n+     * so opening the device will return -EBUSY.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *   Calls to get_camera_info must still succeed, as if the camera was in the\n+     *   PRESENT status.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4:\n+     *\n+     *   The camera device at this status must return -EINVAL for get_camera_info for call,\n+     *   as the device is not ready.\n+     */\n+    CAMERA_DEVICE_STATUS_ENUMERATING = 2,\n+\n+} camera_device_status_t;\n+\n+/**\n+ * torch_mode_status_t:\n+ *\n+ * The current status of the torch mode, as provided by the HAL through the\n+ * camera_module_callbacks.torch_mode_status_change() call.\n+ *\n+ * The torch mode status of a camera device is applicable only when the camera\n+ * device is present. The framework will not call set_torch_mode() to turn on\n+ * torch mode of a camera device if the camera device is not present. At module\n+ * load time, the framework will assume torch modes are in the\n+ * TORCH_MODE_STATUS_AVAILABLE_OFF state if the camera device is present and\n+ * android.flash.info.available is reported as true via get_camera_info() call.\n+ *\n+ * The behaviors of the camera HAL module that the framework expects in the\n+ * following situations when a camera device's status changes:\n+ *  1. 
A previously-disconnected camera device becomes connected.\n+ *      After camera_module_callbacks::camera_device_status_change() is invoked\n+ *      to inform the framework that the camera device is present, the framework\n+ *      will assume the camera device's torch mode is in\n+ *      TORCH_MODE_STATUS_AVAILABLE_OFF state. The camera HAL module does not need\n+ *      to invoke camera_module_callbacks::torch_mode_status_change() unless the\n+ *      flash unit is unavailable to use by set_torch_mode().\n+ *\n+ *  2. A previously-connected camera becomes disconnected.\n+ *      After camera_module_callbacks::camera_device_status_change() is invoked\n+ *      to inform the framework that the camera device is not present, the\n+ *      framework will not call set_torch_mode() for the disconnected camera\n+ *      device until its flash unit becomes available again. The camera HAL\n+ *      module does not need to invoke\n+ *      camera_module_callbacks::torch_mode_status_change() separately to inform\n+ *      that the flash unit has become unavailable.\n+ *\n+ *  3. open() is called to open a camera device.\n+ *      The camera HAL module must invoke\n+ *      camera_module_callbacks::torch_mode_status_change() for all flash units\n+ *      that have entered TORCH_MODE_STATUS_NOT_AVAILABLE state and can not be\n+ *      turned on by calling set_torch_mode() anymore due to this open() call.\n+ *      open() must not trigger TORCH_MODE_STATUS_AVAILABLE_OFF before\n+ *      TORCH_MODE_STATUS_NOT_AVAILABLE for all flash units that have become\n+ *      unavailable.\n+ *\n+ *  4. 
close() is called to close a camera device.\n+ *      The camera HAL module must invoke\n+ *      camera_module_callbacks::torch_mode_status_change() for all flash units\n+ *      that have entered TORCH_MODE_STATUS_AVAILABLE_OFF state and can be turned\n+ *      on by calling set_torch_mode() again because of enough resources freed\n+ *      up by this close() call.\n+ *\n+ *  Note that the framework calling set_torch_mode() successfully must trigger\n+ *  TORCH_MODE_STATUS_AVAILABLE_OFF or TORCH_MODE_STATUS_AVAILABLE_ON callback\n+ *  for the given camera device. Additionally it must trigger\n+ *  TORCH_MODE_STATUS_AVAILABLE_OFF callbacks for other previously-on torch\n+ *  modes if HAL cannot keep multiple torch modes on simultaneously.\n+ */\n+typedef enum torch_mode_status {\n+\n+    /**\n+     * The flash unit is no longer available and the torch mode can not be\n+     * turned on by calling set_torch_mode(). If the torch mode is on, it\n+     * will be turned off by HAL before HAL calls torch_mode_status_change().\n+     */\n+    TORCH_MODE_STATUS_NOT_AVAILABLE = 0,\n+\n+    /**\n+     * A torch mode has become off and available to be turned on via\n+     * set_torch_mode(). This may happen in the following\n+     * cases:\n+     *   1. After the resources to turn on the torch mode have become available.\n+     *   2. After set_torch_mode() is called to turn off the torch mode.\n+     *   3. After the framework turned on the torch mode of some other camera\n+     *      device and HAL had to turn off the torch modes of any camera devices\n+     *      that were previously on.\n+     */\n+    TORCH_MODE_STATUS_AVAILABLE_OFF = 1,\n+\n+    /**\n+     * A torch mode has become on and available to be turned off via\n+     * set_torch_mode(). 
This can happen only after set_torch_mode() is called\n+     * to turn on the torch mode.\n+     */\n+    TORCH_MODE_STATUS_AVAILABLE_ON = 2,\n+\n+} torch_mode_status_t;\n+\n+/**\n+ * Callback functions for the camera HAL module to use to inform the framework\n+ * of changes to the camera subsystem.\n+ *\n+ * Version information (based on camera_module_t.common.module_api_version):\n+ *\n+ * Each callback is called only by HAL modules implementing the indicated\n+ * version or higher of the HAL module API interface.\n+ *\n+ *  CAMERA_MODULE_API_VERSION_2_1:\n+ *    camera_device_status_change()\n+ *\n+ *  CAMERA_MODULE_API_VERSION_2_4:\n+ *    torch_mode_status_change()\n+\n+ */\n+typedef struct camera_module_callbacks {\n+\n+    /**\n+     * camera_device_status_change:\n+     *\n+     * Callback to the framework to indicate that the state of a specific camera\n+     * device has changed. At module load time, the framework will assume all\n+     * camera devices are in the CAMERA_DEVICE_STATUS_PRESENT state. 
The HAL\n+     * must call this method to inform the framework of any initially\n+     * NOT_PRESENT devices.\n+     *\n+     * This callback is added for CAMERA_MODULE_API_VERSION_2_1.\n+     *\n+     * camera_module_callbacks: The instance of camera_module_callbacks_t passed\n+     *   to the module with set_callbacks.\n+     *\n+     * camera_id: The ID of the camera device that has a new status.\n+     *\n+     * new_status: The new status code, one of the camera_device_status_t enums,\n+     *   or a platform-specific status.\n+     *\n+     */\n+    void (*camera_device_status_change)(const struct camera_module_callbacks*,\n+            int camera_id,\n+            int new_status);\n+\n+    /**\n+     * torch_mode_status_change:\n+     *\n+     * Callback to the framework to indicate that the state of the torch mode\n+     * of the flash unit associated with a specific camera device has changed.\n+     * At module load time, the framework will assume the torch modes are in\n+     * the TORCH_MODE_STATUS_AVAILABLE_OFF state if android.flash.info.available\n+     * is reported as true via get_camera_info() call.\n+     *\n+     * This callback is added for CAMERA_MODULE_API_VERSION_2_4.\n+     *\n+     * camera_module_callbacks: The instance of camera_module_callbacks_t\n+     *   passed to the module with set_callbacks.\n+     *\n+     * camera_id: The ID of camera device whose flash unit has a new torch mode\n+     *   status.\n+     *\n+     * new_status: The new status code, one of the torch_mode_status_t enums.\n+     */\n+    void (*torch_mode_status_change)(const struct camera_module_callbacks*,\n+            const char* camera_id,\n+            int new_status);\n+\n+\n+} camera_module_callbacks_t;\n+\n+typedef struct camera_module {\n+    /**\n+     * Common methods of the camera module.  
This *must* be the first member of\n+     * camera_module as users of this structure will cast a hw_module_t to\n+     * camera_module pointer in contexts where it's known the hw_module_t\n+     * references a camera_module.\n+     *\n+     * The return values for common.methods->open for camera_module are:\n+     *\n+     * 0:           On a successful open of the camera device.\n+     *\n+     * -ENODEV:     The camera device cannot be opened due to an internal\n+     *              error.\n+     *\n+     * -EINVAL:     The input arguments are invalid, i.e. the id is invalid,\n+     *              and/or the module is invalid.\n+     *\n+     * -EBUSY:      The camera device was already opened for this camera id\n+     *              (by using this method or open_legacy),\n+     *              regardless of the device HAL version it was opened as.\n+     *\n+     * -EUSERS:     The maximal number of camera devices that can be\n+     *              opened concurrently were opened already, either by\n+     *              this method or the open_legacy method.\n+     *\n+     * All other return values from common.methods->open will be treated as\n+     * -ENODEV.\n+     */\n+    hw_module_t common;\n+\n+    /**\n+     * get_number_of_cameras:\n+     *\n+     * Returns the number of camera devices accessible through the camera\n+     * module.  The camera devices are numbered 0 through N-1, where N is the\n+     * value returned by this call. The name of the camera device for open() is\n+     * simply the number converted to a string. 
That is, \"0\" for camera ID 0,\n+     * \"1\" for camera ID 1.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_3 or lower:\n+     *\n+     *   The value here must be static, and cannot change after the first call\n+     *   to this method.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *   The value here must be static, and must count only built-in cameras,\n+     *   which have CAMERA_FACING_BACK or CAMERA_FACING_FRONT camera facing values\n+     *   (camera_info.facing). The HAL must not include the external cameras\n+     *   (camera_info.facing == CAMERA_FACING_EXTERNAL) into the return value\n+     *   of this call. Frameworks will use camera_device_status_change callback\n+     *   to manage number of external cameras.\n+     */\n+    int (*get_number_of_cameras)(void);\n+\n+    /**\n+     * get_camera_info:\n+     *\n+     * Return the static camera information for a given camera device. This\n+     * information may not change for a camera device.\n+     *\n+     * Return values:\n+     *\n+     * 0:           On a successful operation\n+     *\n+     * -ENODEV:     The information cannot be provided due to an internal\n+     *              error.\n+     *\n+     * -EINVAL:     The input arguments are invalid, i.e. the id is invalid,\n+     *              and/or the module is invalid.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4 or higher:\n+     *\n+     *   When a camera is disconnected, its camera id becomes invalid. 
Calling this\n+     *   this method with this invalid camera id will get -EINVAL and NULL camera\n+     *   static metadata (camera_info.static_camera_characteristics).\n+     */\n+    int (*get_camera_info)(int camera_id, struct camera_info *info);\n+\n+    /**\n+     * set_callbacks:\n+     *\n+     * Provide callback function pointers to the HAL module to inform framework\n+     * of asynchronous camera module events. The framework will call this\n+     * function once after initial camera HAL module load, after the\n+     * get_number_of_cameras() method is called for the first time, and before\n+     * any other calls to the module.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_1_0, CAMERA_MODULE_API_VERSION_2_0:\n+     *\n+     *    Not provided by HAL module. Framework may not call this function.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_1:\n+     *\n+     *    Valid to be called by the framework.\n+     *\n+     * Return values:\n+     *\n+     * 0:           On a successful operation\n+     *\n+     * -ENODEV:     The operation cannot be completed due to an internal\n+     *              error.\n+     *\n+     * -EINVAL:     The input arguments are invalid, i.e. the callbacks are\n+     *              null\n+     */\n+    int (*set_callbacks)(const camera_module_callbacks_t *callbacks);\n+\n+    /**\n+     * get_vendor_tag_ops:\n+     *\n+     * Get methods to query for vendor extension metadata tag information. The\n+     * HAL should fill in all the vendor tag operation methods, or leave ops\n+     * unchanged if no vendor tags are defined.\n+     *\n+     * The vendor_tag_ops structure used here is defined in:\n+     * system/media/camera/include/system/vendor_tags.h\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_1_x/2_0/2_1:\n+     *    Not provided by HAL module. 
Framework may not call this function.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_2:\n+     *    Valid to be called by the framework.\n+     */\n+    void (*get_vendor_tag_ops)(vendor_tag_ops_t* ops);\n+\n+    /**\n+     * open_legacy:\n+     *\n+     * Open a specific legacy camera HAL device if multiple device HAL API\n+     * versions are supported by this camera HAL module. For example, if the\n+     * camera module supports both CAMERA_DEVICE_API_VERSION_1_0 and\n+     * CAMERA_DEVICE_API_VERSION_3_2 device API for the same camera id,\n+     * framework can call this function to open the camera device as\n+     * CAMERA_DEVICE_API_VERSION_1_0 device.\n+     *\n+     * This is an optional method. A Camera HAL module does not need to support\n+     * more than one device HAL version per device, and such modules may return\n+     * -ENOSYS for all calls to this method. For all older HAL device API\n+     * versions that are not supported, it may return -EOPNOTSUPP. When above\n+     * cases occur, The normal open() method (common.methods->open) will be\n+     * used by the framework instead.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     *  CAMERA_MODULE_API_VERSION_1_x/2_0/2_1/2_2:\n+     *    Not provided by HAL module. Framework will not call this function.\n+     *\n+     *  CAMERA_MODULE_API_VERSION_2_3:\n+     *    Valid to be called by the framework.\n+     *\n+     * Return values:\n+     *\n+     * 0:           On a successful open of the camera device.\n+     *\n+     * -ENOSYS      This method is not supported.\n+     *\n+     * -EOPNOTSUPP: The requested HAL version is not supported by this method.\n+     *\n+     * -EINVAL:     The input arguments are invalid, i.e. 
the id is invalid,\n+     *              and/or the module is invalid.\n+     *\n+     * -EBUSY:      The camera device was already opened for this camera id\n+     *              (by using this method or common.methods->open method),\n+     *              regardless of the device HAL version it was opened as.\n+     *\n+     * -EUSERS:     The maximal number of camera devices that can be\n+     *              opened concurrently were opened already, either by\n+     *              this method or common.methods->open method.\n+     */\n+    int (*open_legacy)(const struct hw_module_t* module, const char* id,\n+            uint32_t halVersion, struct hw_device_t** device);\n+\n+    /**\n+     * set_torch_mode:\n+     *\n+     * Turn on or off the torch mode of the flash unit associated with a given\n+     * camera ID. If the operation is successful, HAL must notify the framework\n+     * torch state by invoking\n+     * camera_module_callbacks.torch_mode_status_change() with the new state.\n+     *\n+     * The camera device has a higher priority accessing the flash unit. When\n+     * there are any resource conflicts, such as open() is called to open a\n+     * camera device, HAL module must notify the framework through\n+     * camera_module_callbacks.torch_mode_status_change() that the\n+     * torch mode has been turned off and the torch mode state has become\n+     * TORCH_MODE_STATUS_NOT_AVAILABLE. 
When resources to turn on torch mode\n+     * become available again, HAL module must notify the framework through\n+     * camera_module_callbacks.torch_mode_status_change() that the torch mode\n+     * state has become TORCH_MODE_STATUS_AVAILABLE_OFF for set_torch_mode() to\n+     * be called.\n+     *\n+     * When the framework calls set_torch_mode() to turn on the torch mode of a\n+     * flash unit, if HAL cannot keep multiple torch modes on simultaneously,\n+     * HAL should turn off the torch mode that was turned on by\n+     * a previous set_torch_mode() call and notify the framework that the torch\n+     * mode state of that flash unit has become TORCH_MODE_STATUS_AVAILABLE_OFF.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_1_x/2_0/2_1/2_2/2_3:\n+     *   Not provided by HAL module. Framework will not call this function.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4:\n+     *   Valid to be called by the framework.\n+     *\n+     * Return values:\n+     *\n+     * 0:           On a successful operation.\n+     *\n+     * -ENOSYS:     The camera device does not support this operation. 
It is\n+     *              returned if and only if android.flash.info.available is\n+     *              false.\n+     *\n+     * -EBUSY:      The camera device is already in use.\n+     *\n+     * -EUSERS:     The resources needed to turn on the torch mode are not\n+     *              available, typically because other camera devices are\n+     *              holding the resources to make using the flash unit not\n+     *              possible.\n+     *\n+     * -EINVAL:     camera_id is invalid.\n+     *\n+     */\n+    int (*set_torch_mode)(const char* camera_id, bool enabled);\n+\n+    /**\n+     * init:\n+     *\n+     * This method is called by the camera service before any other methods\n+     * are invoked, right after the camera HAL library has been successfully\n+     * loaded. It may be left as NULL by the HAL module, if no initialization\n+     * in needed.\n+     *\n+     * It can be used by HAL implementations to perform initialization and\n+     * other one-time operations.\n+     *\n+     * Version information (based on camera_module_t.common.module_api_version):\n+     *\n+     * CAMERA_MODULE_API_VERSION_1_x/2_0/2_1/2_2/2_3:\n+     *   Not provided by HAL module. Framework will not call this function.\n+     *\n+     * CAMERA_MODULE_API_VERSION_2_4:\n+     *   If not NULL, will always be called by the framework once after the HAL\n+     *   module is loaded, before any other HAL module method is called.\n+     *\n+     * Return values:\n+     *\n+     * 0:           On a successful operation.\n+     *\n+     * -ENODEV:     Initialization cannot be completed due to an internal\n+     *              error. 
The HAL must be assumed to be in a nonfunctional\n+     *              state.\n+     *\n+     */\n+    int (*init)();\n+\n+    /* reserved for future use */\n+    void* reserved[5];\n+} camera_module_t;\n+\n+__END_DECLS\n+\n+#endif /* ANDROID_INCLUDE_CAMERA_COMMON_H */\ndiff --git a/include/android/hardware/libhardware/include/hardware/fb.h b/include/android/hardware/libhardware/include/hardware/fb.h\nnew file mode 100644\nindex 000000000000..9df94165b9b1\n--- /dev/null\n+++ b/include/android/hardware/libhardware/include/hardware/fb.h\n@@ -0,0 +1,173 @@\n+/*\n+ * Copyright (C) 2008 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+\n+#ifndef ANDROID_FB_INTERFACE_H\n+#define ANDROID_FB_INTERFACE_H\n+\n+#include <stdint.h>\n+#include <sys/cdefs.h>\n+#include <sys/types.h>\n+\n+#include <cutils/native_handle.h>\n+\n+#include <hardware/hardware.h>\n+\n+__BEGIN_DECLS\n+\n+#define GRALLOC_HARDWARE_FB0 \"fb0\"\n+\n+/*****************************************************************************/\n+\n+\n+/*****************************************************************************/\n+\n+typedef struct framebuffer_device_t {\n+    /**\n+     * Common methods of the framebuffer device.  
This *must* be the first member of\n+     * framebuffer_device_t as users of this structure will cast a hw_device_t to\n+     * framebuffer_device_t pointer in contexts where it's known the hw_device_t references a\n+     * framebuffer_device_t.\n+     */\n+    struct hw_device_t common;\n+\n+    /* flags describing some attributes of the framebuffer */\n+    const uint32_t  flags;\n+\n+    /* dimensions of the framebuffer in pixels */\n+    const uint32_t  width;\n+    const uint32_t  height;\n+\n+    /* frambuffer stride in pixels */\n+    const int       stride;\n+\n+    /* framebuffer pixel format */\n+    const int       format;\n+\n+    /* resolution of the framebuffer's display panel in pixel per inch*/\n+    const float     xdpi;\n+    const float     ydpi;\n+\n+    /* framebuffer's display panel refresh rate in frames per second */\n+    const float     fps;\n+\n+    /* min swap interval supported by this framebuffer */\n+    const int       minSwapInterval;\n+\n+    /* max swap interval supported by this framebuffer */\n+    const int       maxSwapInterval;\n+\n+    /* Number of framebuffers supported*/\n+    const int       numFramebuffers;\n+\n+    int reserved[7];\n+\n+    /*\n+     * requests a specific swap-interval (same definition than EGL)\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int (*setSwapInterval)(struct framebuffer_device_t* window,\n+            int interval);\n+\n+    /*\n+     * This hook is OPTIONAL.\n+     *\n+     * It is non NULL If the framebuffer driver supports \"update-on-demand\"\n+     * and the given rectangle is the area of the screen that gets\n+     * updated during (*post)().\n+     *\n+     * This is useful on devices that are able to DMA only a portion of\n+     * the screen to the display panel, upon demand -- as opposed to\n+     * constantly refreshing the panel 60 times per second, for instance.\n+     *\n+     * Only the area defined by this rectangle is guaranteed to be valid, 
that\n+     * is, the driver is not allowed to post anything outside of this\n+     * rectangle.\n+     *\n+     * The rectangle evaluated during (*post)() and specifies which area\n+     * of the buffer passed in (*post)() shall to be posted.\n+     *\n+     * return -EINVAL if width or height <=0, or if left or top < 0\n+     */\n+    int (*setUpdateRect)(struct framebuffer_device_t* window,\n+            int left, int top, int width, int height);\n+\n+    /*\n+     * Post <buffer> to the display (display it on the screen)\n+     * The buffer must have been allocated with the\n+     *   GRALLOC_USAGE_HW_FB usage flag.\n+     * buffer must be the same width and height as the display and must NOT\n+     * be locked.\n+     *\n+     * The buffer is shown during the next VSYNC.\n+     *\n+     * If the same buffer is posted again (possibly after some other buffer),\n+     * post() will block until the the first post is completed.\n+     *\n+     * Internally, post() is expected to lock the buffer so that a\n+     * subsequent call to gralloc_module_t::(*lock)() with USAGE_RENDER or\n+     * USAGE_*_WRITE will block until it is safe; that is typically once this\n+     * buffer is shown and another buffer has been posted.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int (*post)(struct framebuffer_device_t* dev, buffer_handle_t buffer);\n+\n+\n+    /*\n+     * The (*compositionComplete)() method must be called after the\n+     * compositor has finished issuing GL commands for client buffers.\n+     */\n+\n+    int (*compositionComplete)(struct framebuffer_device_t* dev);\n+\n+    /*\n+     * This hook is OPTIONAL.\n+     *\n+     * If non NULL it will be caused by SurfaceFlinger on dumpsys\n+     */\n+    void (*dump)(struct framebuffer_device_t* dev, char *buff, int buff_len);\n+\n+    /*\n+     * (*enableScreen)() is used to either blank (enable=0) or\n+     * unblank (enable=1) the screen this framebuffer is attached to.\n+     *\n+     
* Returns 0 on success or -errno on error.\n+     */\n+    int (*enableScreen)(struct framebuffer_device_t* dev, int enable);\n+\n+    void* reserved_proc[6];\n+\n+} framebuffer_device_t;\n+\n+\n+/** convenience API for opening and closing a supported device */\n+\n+static inline int framebuffer_open(const struct hw_module_t* module,\n+        struct framebuffer_device_t** device) {\n+    return module->methods->open(module,\n+            GRALLOC_HARDWARE_FB0, (struct hw_device_t**)device);\n+}\n+\n+static inline int framebuffer_close(struct framebuffer_device_t* device) {\n+    return device->common.close(&device->common);\n+}\n+\n+\n+__END_DECLS\n+\n+#endif  // ANDROID_FB_INTERFACE_H\ndiff --git a/include/android/hardware/libhardware/include/hardware/gralloc.h b/include/android/hardware/libhardware/include/hardware/gralloc.h\nnew file mode 100644\nindex 000000000000..ef86f9039395\n--- /dev/null\n+++ b/include/android/hardware/libhardware/include/hardware/gralloc.h\n@@ -0,0 +1,384 @@\n+/*\n+ * Copyright (C) 2008 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+\n+#ifndef ANDROID_GRALLOC_INTERFACE_H\n+#define ANDROID_GRALLOC_INTERFACE_H\n+\n+#include <system/window.h>\n+#include <system/graphics.h>\n+#include <hardware/hardware.h>\n+\n+#include <stdint.h>\n+#include <sys/cdefs.h>\n+#include <sys/types.h>\n+\n+#include <cutils/native_handle.h>\n+\n+#include <hardware/hardware.h>\n+#include 
<hardware/fb.h>\n+\n+__BEGIN_DECLS\n+\n+/**\n+ * Module versioning information for the Gralloc hardware module, based on\n+ * gralloc_module_t.common.module_api_version.\n+ *\n+ * Version History:\n+ *\n+ * GRALLOC_MODULE_API_VERSION_0_1:\n+ * Initial Gralloc hardware module API.\n+ *\n+ * GRALLOC_MODULE_API_VERSION_0_2:\n+ * Add support for flexible YCbCr format with (*lock_ycbcr)() method.\n+ *\n+ * GRALLOC_MODULE_API_VERSION_0_3:\n+ * Add support for fence passing to/from lock/unlock.\n+ */\n+\n+#define GRALLOC_MODULE_API_VERSION_0_1  HARDWARE_MODULE_API_VERSION(0, 1)\n+#define GRALLOC_MODULE_API_VERSION_0_2  HARDWARE_MODULE_API_VERSION(0, 2)\n+#define GRALLOC_MODULE_API_VERSION_0_3  HARDWARE_MODULE_API_VERSION(0, 3)\n+\n+#define GRALLOC_DEVICE_API_VERSION_0_1  HARDWARE_DEVICE_API_VERSION(0, 1)\n+\n+/**\n+ * The id of this module\n+ */\n+#define GRALLOC_HARDWARE_MODULE_ID \"gralloc\"\n+\n+/**\n+ * Name of the graphics device to open\n+ */\n+\n+#define GRALLOC_HARDWARE_GPU0 \"gpu0\"\n+\n+enum {\n+    /* buffer is never read in software */\n+    GRALLOC_USAGE_SW_READ_NEVER         = 0x00000000,\n+    /* buffer is rarely read in software */\n+    GRALLOC_USAGE_SW_READ_RARELY        = 0x00000002,\n+    /* buffer is often read in software */\n+    GRALLOC_USAGE_SW_READ_OFTEN         = 0x00000003,\n+    /* mask for the software read values */\n+    GRALLOC_USAGE_SW_READ_MASK          = 0x0000000F,\n+\n+    /* buffer is never written in software */\n+    GRALLOC_USAGE_SW_WRITE_NEVER        = 0x00000000,\n+    /* buffer is rarely written in software */\n+    GRALLOC_USAGE_SW_WRITE_RARELY       = 0x00000020,\n+    /* buffer is often written in software */\n+    GRALLOC_USAGE_SW_WRITE_OFTEN        = 0x00000030,\n+    /* mask for the software write values */\n+    GRALLOC_USAGE_SW_WRITE_MASK         = 0x000000F0,\n+\n+    /* buffer will be used as an OpenGL ES texture */\n+    GRALLOC_USAGE_HW_TEXTURE            = 0x00000100,\n+    /* buffer will be used as an OpenGL ES 
render target */\n+    GRALLOC_USAGE_HW_RENDER             = 0x00000200,\n+    /* buffer will be used by the 2D hardware blitter */\n+    GRALLOC_USAGE_HW_2D                 = 0x00000400,\n+    /* buffer will be used by the HWComposer HAL module */\n+    GRALLOC_USAGE_HW_COMPOSER           = 0x00000800,\n+    /* buffer will be used with the framebuffer device */\n+    GRALLOC_USAGE_HW_FB                 = 0x00001000,\n+\n+    /* buffer should be displayed full-screen on an external display when\n+     * possible */\n+    GRALLOC_USAGE_EXTERNAL_DISP         = 0x00002000,\n+\n+    /* Must have a hardware-protected path to external display sink for\n+     * this buffer.  If a hardware-protected path is not available, then\n+     * either don't composite only this buffer (preferred) to the\n+     * external sink, or (less desirable) do not route the entire\n+     * composition to the external sink.  */\n+    GRALLOC_USAGE_PROTECTED             = 0x00004000,\n+\n+    /* buffer may be used as a cursor */\n+    GRALLOC_USAGE_CURSOR                = 0x00008000,\n+\n+    /* buffer will be used with the HW video encoder */\n+    GRALLOC_USAGE_HW_VIDEO_ENCODER      = 0x00010000,\n+    /* buffer will be written by the HW camera pipeline */\n+    GRALLOC_USAGE_HW_CAMERA_WRITE       = 0x00020000,\n+    /* buffer will be read by the HW camera pipeline */\n+    GRALLOC_USAGE_HW_CAMERA_READ        = 0x00040000,\n+    /* buffer will be used as part of zero-shutter-lag queue */\n+    GRALLOC_USAGE_HW_CAMERA_ZSL         = 0x00060000,\n+    /* mask for the camera access values */\n+    GRALLOC_USAGE_HW_CAMERA_MASK        = 0x00060000,\n+    /* mask for the software usage bit-mask */\n+    GRALLOC_USAGE_HW_MASK               = 0x00071F00,\n+\n+    /* buffer will be used as a RenderScript Allocation */\n+    GRALLOC_USAGE_RENDERSCRIPT          = 0x00100000,\n+\n+    /* Set by the consumer to indicate to the producer that they may attach a\n+     * buffer that they did not detach from the 
BufferQueue. Will be filtered\n+     * out by GRALLOC_USAGE_ALLOC_MASK, so gralloc modules will not need to\n+     * handle this flag. */\n+    GRALLOC_USAGE_FOREIGN_BUFFERS       = 0x00200000,\n+\n+    /* Mask of all flags which could be passed to a gralloc module for buffer\n+     * allocation. Any flags not in this mask do not need to be handled by\n+     * gralloc modules. */\n+    GRALLOC_USAGE_ALLOC_MASK            = ~(GRALLOC_USAGE_FOREIGN_BUFFERS),\n+\n+    /* implementation-specific private usage flags */\n+    GRALLOC_USAGE_PRIVATE_0             = 0x10000000,\n+    GRALLOC_USAGE_PRIVATE_1             = 0x20000000,\n+    GRALLOC_USAGE_PRIVATE_2             = 0x40000000,\n+    GRALLOC_USAGE_PRIVATE_3             = 0x80000000,\n+    GRALLOC_USAGE_PRIVATE_MASK          = 0xF0000000,\n+};\n+\n+/*****************************************************************************/\n+\n+/**\n+ * Every hardware module must have a data structure named HAL_MODULE_INFO_SYM\n+ * and the fields of this data structure must begin with hw_module_t\n+ * followed by module specific information.\n+ */\n+typedef struct gralloc_module_t {\n+    struct hw_module_t common;\n+    \n+    /*\n+     * (*registerBuffer)() must be called before a buffer_handle_t that has not\n+     * been created with (*alloc_device_t::alloc)() can be used.\n+     * \n+     * This is intended to be used with buffer_handle_t's that have been\n+     * received in this process through IPC.\n+     * \n+     * This function checks that the handle is indeed a valid one and prepares\n+     * it for use with (*lock)() and (*unlock)().\n+     * \n+     * It is not necessary to call (*registerBuffer)() on a handle created \n+     * with (*alloc_device_t::alloc)().\n+     * \n+     * returns an error if this buffer_handle_t is not valid.\n+     */\n+    int (*registerBuffer)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle);\n+\n+    /*\n+     * (*unregisterBuffer)() is called once this 
handle is no longer needed in\n+     * this process. After this call, it is an error to call (*lock)(),\n+     * (*unlock)(), or (*registerBuffer)().\n+     * \n+     * This function doesn't close or free the handle itself; this is done\n+     * by other means, usually through libcutils's native_handle_close() and\n+     * native_handle_free(). \n+     * \n+     * It is an error to call (*unregisterBuffer)() on a buffer that wasn't\n+     * explicitly registered first.\n+     */\n+    int (*unregisterBuffer)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle);\n+    \n+    /*\n+     * The (*lock)() method is called before a buffer is accessed for the \n+     * specified usage. This call may block, for instance if the h/w needs\n+     * to finish rendering or if CPU caches need to be synchronized.\n+     * \n+     * The caller promises to modify only pixels in the area specified \n+     * by (l,t,w,h).\n+     * \n+     * The content of the buffer outside of the specified area is NOT modified\n+     * by this call.\n+     *\n+     * If usage specifies GRALLOC_USAGE_SW_*, vaddr is filled with the address\n+     * of the buffer in virtual memory.\n+     *\n+     * Note calling (*lock)() on HAL_PIXEL_FORMAT_YCbCr_*_888 buffers will fail\n+     * and return -EINVAL.  
These buffers must be locked with (*lock_ycbcr)()\n+     * instead.\n+     *\n+     * THREADING CONSIDERATIONS:\n+     *\n+     * It is legal for several different threads to lock a buffer from \n+     * read access, none of the threads are blocked.\n+     * \n+     * However, locking a buffer simultaneously for write or read/write is\n+     * undefined, but:\n+     * - shall not result in termination of the process\n+     * - shall not block the caller\n+     * It is acceptable to return an error or to leave the buffer's content\n+     * into an indeterminate state.\n+     *\n+     * If the buffer was created with a usage mask incompatible with the\n+     * requested usage flags here, -EINVAL is returned. \n+     * \n+     */\n+    \n+    int (*lock)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle, int usage,\n+            int l, int t, int w, int h,\n+            void** vaddr);\n+\n+    \n+    /*\n+     * The (*unlock)() method must be called after all changes to the buffer\n+     * are completed.\n+     */\n+    \n+    int (*unlock)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle);\n+\n+\n+    /* reserved for future use */\n+    int (*perform)(struct gralloc_module_t const* module,\n+            int operation, ... 
);\n+\n+    /*\n+     * The (*lock_ycbcr)() method is like the (*lock)() method, with the\n+     * difference that it fills a struct ycbcr with a description of the buffer\n+     * layout, and zeroes out the reserved fields.\n+     *\n+     * If the buffer format is not compatible with a flexible YUV format (e.g.\n+     * the buffer layout cannot be represented with the ycbcr struct), it\n+     * will return -EINVAL.\n+     *\n+     * This method must work on buffers with HAL_PIXEL_FORMAT_YCbCr_*_888\n+     * if supported by the device, as well as with any other format that is\n+     * requested by the multimedia codecs when they are configured with a\n+     * flexible-YUV-compatible color-format with android native buffers.\n+     *\n+     * Note that this method may also be called on buffers of other formats,\n+     * including non-YUV formats.\n+     *\n+     * Added in GRALLOC_MODULE_API_VERSION_0_2.\n+     */\n+\n+    int (*lock_ycbcr)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle, int usage,\n+            int l, int t, int w, int h,\n+            struct android_ycbcr *ycbcr);\n+\n+    /*\n+     * The (*lockAsync)() method is like the (*lock)() method except\n+     * that the buffer's sync fence object is passed into the lock\n+     * call instead of requiring the caller to wait for completion.\n+     *\n+     * The gralloc implementation takes ownership of the fenceFd and\n+     * is responsible for closing it when no longer needed.\n+     *\n+     * Added in GRALLOC_MODULE_API_VERSION_0_3.\n+     */\n+    int (*lockAsync)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle, int usage,\n+            int l, int t, int w, int h,\n+            void** vaddr, int fenceFd);\n+\n+    /*\n+     * The (*unlockAsync)() method is like the (*unlock)() method\n+     * except that a buffer sync fence object is returned from the\n+     * lock call, representing the completion of any pending work\n+     * performed by the 
gralloc implementation.\n+     *\n+     * The caller takes ownership of the fenceFd and is responsible\n+     * for closing it when no longer needed.\n+     *\n+     * Added in GRALLOC_MODULE_API_VERSION_0_3.\n+     */\n+    int (*unlockAsync)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle, int* fenceFd);\n+\n+    /*\n+     * The (*lockAsync_ycbcr)() method is like the (*lock_ycbcr)()\n+     * method except that the buffer's sync fence object is passed\n+     * into the lock call instead of requiring the caller to wait for\n+     * completion.\n+     *\n+     * The gralloc implementation takes ownership of the fenceFd and\n+     * is responsible for closing it when no longer needed.\n+     *\n+     * Added in GRALLOC_MODULE_API_VERSION_0_3.\n+     */\n+    int (*lockAsync_ycbcr)(struct gralloc_module_t const* module,\n+            buffer_handle_t handle, int usage,\n+            int l, int t, int w, int h,\n+            struct android_ycbcr *ycbcr, int fenceFd);\n+\n+    /* reserved for future use */\n+    void* reserved_proc[3];\n+} gralloc_module_t;\n+\n+/*****************************************************************************/\n+\n+/**\n+ * Every device data structure must begin with hw_device_t\n+ * followed by module specific public methods and attributes.\n+ */\n+\n+typedef struct alloc_device_t {\n+    struct hw_device_t common;\n+\n+    /* \n+     * (*alloc)() Allocates a buffer in graphic memory with the requested\n+     * parameters and returns a buffer_handle_t and the stride in pixels to\n+     * allow the implementation to satisfy hardware constraints on the width\n+     * of a pixmap (eg: it may have to be multiple of 8 pixels). 
\n+     * The CALLER TAKES OWNERSHIP of the buffer_handle_t.\n+     *\n+     * If format is HAL_PIXEL_FORMAT_YCbCr_420_888, the returned stride must be\n+     * 0, since the actual strides are available from the android_ycbcr\n+     * structure.\n+     * \n+     * Returns 0 on success or -errno on error.\n+     */\n+    \n+    int (*alloc)(struct alloc_device_t* dev,\n+            int w, int h, int format, int usage,\n+            buffer_handle_t* handle, int* stride);\n+\n+    /*\n+     * (*free)() Frees a previously allocated buffer. \n+     * Behavior is undefined if the buffer is still mapped in any process,\n+     * but shall not result in termination of the program or security breaches\n+     * (allowing a process to get access to another process' buffers).\n+     * THIS FUNCTION TAKES OWNERSHIP of the buffer_handle_t which becomes\n+     * invalid after the call. \n+     * \n+     * Returns 0 on success or -errno on error.\n+     */\n+    int (*free)(struct alloc_device_t* dev,\n+            buffer_handle_t handle);\n+\n+    /* This hook is OPTIONAL.\n+     *\n+     * If non NULL it will be caused by SurfaceFlinger on dumpsys\n+     */\n+    void (*dump)(struct alloc_device_t *dev, char *buff, int buff_len);\n+\n+    void* reserved_proc[7];\n+} alloc_device_t;\n+\n+\n+/** convenience API for opening and closing a supported device */\n+\n+static inline int gralloc_open(const struct hw_module_t* module, \n+        struct alloc_device_t** device) {\n+    return module->methods->open(module, \n+            GRALLOC_HARDWARE_GPU0, (struct hw_device_t**)device);\n+}\n+\n+static inline int gralloc_close(struct alloc_device_t* device) {\n+    return device->common.close(&device->common);\n+}\n+\n+__END_DECLS\n+\n+#endif  // ANDROID_GRALLOC_INTERFACE_H\ndiff --git a/include/android/hardware/libhardware/include/hardware/hardware.h b/include/android/hardware/libhardware/include/hardware/hardware.h\nnew file mode 100644\nindex 000000000000..74f57aa4c28f\n--- 
/dev/null\n+++ b/include/android/hardware/libhardware/include/hardware/hardware.h\n@@ -0,0 +1,238 @@\n+/*\n+ * Copyright (C) 2008 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef ANDROID_INCLUDE_HARDWARE_HARDWARE_H\n+#define ANDROID_INCLUDE_HARDWARE_HARDWARE_H\n+\n+#include <stdint.h>\n+#include <sys/cdefs.h>\n+\n+#include <cutils/native_handle.h>\n+#include <system/graphics.h>\n+\n+__BEGIN_DECLS\n+\n+/*\n+ * Value for the hw_module_t.tag field\n+ */\n+\n+#define MAKE_TAG_CONSTANT(A,B,C,D) (((A) << 24) | ((B) << 16) | ((C) << 8) | (D))\n+\n+#define HARDWARE_MODULE_TAG MAKE_TAG_CONSTANT('H', 'W', 'M', 'T')\n+#define HARDWARE_DEVICE_TAG MAKE_TAG_CONSTANT('H', 'W', 'D', 'T')\n+\n+#define HARDWARE_MAKE_API_VERSION(maj,min) \\\n+            ((((maj) & 0xff) << 8) | ((min) & 0xff))\n+\n+#define HARDWARE_MAKE_API_VERSION_2(maj,min,hdr) \\\n+            ((((maj) & 0xff) << 24) | (((min) & 0xff) << 16) | ((hdr) & 0xffff))\n+#define HARDWARE_API_VERSION_2_MAJ_MIN_MASK 0xffff0000\n+#define HARDWARE_API_VERSION_2_HEADER_MASK  0x0000ffff\n+\n+\n+/*\n+ * The current HAL API version.\n+ *\n+ * All module implementations must set the hw_module_t.hal_api_version field\n+ * to this value when declaring the module with HAL_MODULE_INFO_SYM.\n+ *\n+ * Note that previous implementations have always set this field to 0.\n+ * Therefore, libhardware HAL API will always consider versions 0.0 and 1.0\n+ * to be 
100% binary compatible.\n+ *\n+ */\n+#define HARDWARE_HAL_API_VERSION HARDWARE_MAKE_API_VERSION(1, 0)\n+\n+/*\n+ * Helper macros for module implementors.\n+ *\n+ * The derived modules should provide convenience macros for supported\n+ * versions so that implementations can explicitly specify module/device\n+ * versions at definition time.\n+ *\n+ * Use this macro to set the hw_module_t.module_api_version field.\n+ */\n+#define HARDWARE_MODULE_API_VERSION(maj,min) HARDWARE_MAKE_API_VERSION(maj,min)\n+#define HARDWARE_MODULE_API_VERSION_2(maj,min,hdr) HARDWARE_MAKE_API_VERSION_2(maj,min,hdr)\n+\n+/*\n+ * Use this macro to set the hw_device_t.version field\n+ */\n+#define HARDWARE_DEVICE_API_VERSION(maj,min) HARDWARE_MAKE_API_VERSION(maj,min)\n+#define HARDWARE_DEVICE_API_VERSION_2(maj,min,hdr) HARDWARE_MAKE_API_VERSION_2(maj,min,hdr)\n+\n+struct hw_module_t;\n+struct hw_module_methods_t;\n+struct hw_device_t;\n+\n+/**\n+ * Every hardware module must have a data structure named HAL_MODULE_INFO_SYM\n+ * and the fields of this data structure must begin with hw_module_t\n+ * followed by module specific information.\n+ */\n+typedef struct hw_module_t {\n+    /** tag must be initialized to HARDWARE_MODULE_TAG */\n+    uint32_t tag;\n+\n+    /**\n+     * The API version of the implemented module. 
The module owner is\n+     * responsible for updating the version when a module interface has\n+     * changed.\n+     *\n+     * The derived modules such as gralloc and audio own and manage this field.\n+     * The module user must interpret the version field to decide whether or\n+     * not to inter-operate with the supplied module implementation.\n+     * For example, SurfaceFlinger is responsible for making sure that\n+     * it knows how to manage different versions of the gralloc-module API,\n+     * and AudioFlinger must know how to do the same for audio-module API.\n+     *\n+     * The module API version should include a major and a minor component.\n+     * For example, version 1.0 could be represented as 0x0100. This format\n+     * implies that versions 0x0100-0x01ff are all API-compatible.\n+     *\n+     * In the future, libhardware will expose a hw_get_module_version()\n+     * (or equivalent) function that will take minimum/maximum supported\n+     * versions as arguments and would be able to reject modules with\n+     * versions outside of the supplied range.\n+     */\n+    uint16_t module_api_version;\n+#define version_major module_api_version\n+    /**\n+     * version_major/version_minor defines are supplied here for temporary\n+     * source code compatibility. They will be removed in the next version.\n+     * ALL clients must convert to the new version format.\n+     */\n+\n+    /**\n+     * The API version of the HAL module interface. This is meant to\n+     * version the hw_module_t, hw_module_methods_t, and hw_device_t\n+     * structures and definitions.\n+     *\n+     * The HAL interface owns this field. 
Module users/implementations\n+     * must NOT rely on this value for version information.\n+     *\n+     * Presently, 0 is the only valid value.\n+     */\n+    uint16_t hal_api_version;\n+#define version_minor hal_api_version\n+\n+    /** Identifier of module */\n+    const char *id;\n+\n+    /** Name of this module */\n+    const char *name;\n+\n+    /** Author/owner/implementor of the module */\n+    const char *author;\n+\n+    /** Modules methods */\n+    struct hw_module_methods_t* methods;\n+\n+    /** module's dso */\n+    void* dso;\n+\n+#ifdef __LP64__\n+    uint64_t reserved[32-7];\n+#else\n+    /** padding to 128 bytes, reserved for future use */\n+    uint32_t reserved[32-7];\n+#endif\n+\n+} hw_module_t;\n+\n+typedef struct hw_module_methods_t {\n+    /** Open a specific device */\n+    int (*open)(const struct hw_module_t* module, const char* id,\n+            struct hw_device_t** device);\n+\n+} hw_module_methods_t;\n+\n+/**\n+ * Every device data structure must begin with hw_device_t\n+ * followed by module specific public methods and attributes.\n+ */\n+typedef struct hw_device_t {\n+    /** tag must be initialized to HARDWARE_DEVICE_TAG */\n+    uint32_t tag;\n+\n+    /**\n+     * Version of the module-specific device API. This value is used by\n+     * the derived-module user to manage different device implementations.\n+     *\n+     * The module user is responsible for checking the module_api_version\n+     * and device version fields to ensure that the user is capable of\n+     * communicating with the specific module implementation.\n+     *\n+     * One module can support multiple devices with different versions. This\n+     * can be useful when a device interface changes in an incompatible way\n+     * but it is still necessary to support older implementations at the same\n+     * time. 
One such example is the Camera 2.0 API.\n+     *\n+     * This field is interpreted by the module user and is ignored by the\n+     * HAL interface itself.\n+     */\n+    uint32_t version;\n+\n+    /** reference to the module this device belongs to */\n+    struct hw_module_t* module;\n+\n+    /** padding reserved for future use */\n+#ifdef __LP64__\n+    uint64_t reserved[12];\n+#else\n+    uint32_t reserved[12];\n+#endif\n+\n+    /** Close this device */\n+    int (*close)(struct hw_device_t* device);\n+\n+} hw_device_t;\n+\n+/**\n+ * Name of the hal_module_info\n+ */\n+#define HAL_MODULE_INFO_SYM         HMI\n+\n+/**\n+ * Name of the hal_module_info as a string\n+ */\n+#define HAL_MODULE_INFO_SYM_AS_STR  \"HMI\"\n+\n+/**\n+ * Get the module info associated with a module by id.\n+ *\n+ * @return: 0 == success, <0 == error and *module == NULL\n+ */\n+int hw_get_module(const char *id, const struct hw_module_t **module);\n+\n+/**\n+ * Get the module info associated with a module instance by class 'class_id'\n+ * and instance 'inst'.\n+ *\n+ * Some modules types necessitate multiple instances. For example audio supports\n+ * multiple concurrent interfaces and thus 'audio' is the module class\n+ * and 'primary' or 'a2dp' are module interfaces. 
This implies that the files\n+ * providing these modules would be named audio.primary.<variant>.so and\n+ * audio.a2dp.<variant>.so\n+ *\n+ * @return: 0 == success, <0 == error and *module == NULL\n+ */\n+int hw_get_module_by_class(const char *class_id, const char *inst,\n+                           const struct hw_module_t **module);\n+\n+__END_DECLS\n+\n+#endif  /* ANDROID_INCLUDE_HARDWARE_HARDWARE_H */\ndiff --git a/include/android/metadata/camera_metadata_hidden.h b/include/android/metadata/camera_metadata_hidden.h\nnew file mode 100644\nindex 000000000000..31f1ad19e868\n--- /dev/null\n+++ b/include/android/metadata/camera_metadata_hidden.h\n@@ -0,0 +1,100 @@\n+/*\n+ * Copyright 2014 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H\n+#define SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H\n+\n+#include <system/camera_vendor_tags.h>\n+\n+/**\n+ * Error codes returned by vendor tags ops operations. 
These are intended\n+ * to be used by all framework code that uses the return values from the\n+ * vendor operations object.\n+ */\n+#define VENDOR_SECTION_NAME_ERR   NULL\n+#define VENDOR_TAG_NAME_ERR       NULL\n+#define VENDOR_TAG_COUNT_ERR      (-1)\n+#define VENDOR_TAG_TYPE_ERR       (-1)\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+/** **These are private functions for use only by the camera framework.** **/\n+\n+/**\n+ * Set the global vendor tag operations object used to define vendor tag\n+ * structure when parsing camera metadata with functions defined in\n+ * system/media/camera/include/camera_metadata.h.\n+ */\n+ANDROID_API\n+int set_camera_metadata_vendor_ops(const vendor_tag_ops_t *query_ops);\n+\n+/**\n+ * Set the global vendor tag cache operations object used to define vendor tag\n+ * structure when parsing camera metadata with functions defined in\n+ * system/media/camera/include/camera_metadata.h.\n+ */\n+ANDROID_API\n+int set_camera_metadata_vendor_cache_ops(\n+        const struct vendor_tag_cache_ops *query_cache_ops);\n+\n+/**\n+ * Set the vendor id for a particular metadata buffer.\n+ */\n+ANDROID_API\n+void set_camera_metadata_vendor_id(camera_metadata_t *meta,\n+        metadata_vendor_id_t id);\n+\n+/**\n+ * Retrieve the vendor id for a particular metadata buffer.\n+ */\n+ANDROID_API\n+metadata_vendor_id_t get_camera_metadata_vendor_id(\n+        const camera_metadata_t *meta);\n+\n+/**\n+ * Retrieve the type of a tag. Returns -1 if no such tag is defined.\n+ */\n+ANDROID_API\n+int get_local_camera_metadata_tag_type_vendor_id(uint32_t tag,\n+        metadata_vendor_id_t id);\n+\n+/**\n+ * Retrieve the name of a tag. Returns NULL if no such tag is defined.\n+ */\n+ANDROID_API\n+const char *get_local_camera_metadata_tag_name_vendor_id(uint32_t tag,\n+        metadata_vendor_id_t id);\n+\n+/**\n+ * Retrieve the name of a tag section. 
Returns NULL if no such tag is defined.\n+ */\n+ANDROID_API\n+const char *get_local_camera_metadata_section_name_vendor_id(uint32_t tag,\n+        metadata_vendor_id_t id);\n+\n+/**\n+ * Retrieve the type of a tag. Returns -1 if no such tag is defined.\n+ */\n+ANDROID_API\n+int get_local_camera_metadata_tag_type_vendor_id(uint32_t tag,\n+        metadata_vendor_id_t id);\n+\n+#ifdef __cplusplus\n+} /* extern \"C\" */\n+#endif\n+\n+#endif /* SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H */\ndiff --git a/include/android/metadata/system/camera_metadata.h b/include/android/metadata/system/camera_metadata.h\nnew file mode 100644\nindex 000000000000..46e7ac000298\n--- /dev/null\n+++ b/include/android/metadata/system/camera_metadata.h\n@@ -0,0 +1,580 @@\n+/*\n+ * Copyright (C) 2012 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_METADATA_H\n+#define SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_METADATA_H\n+\n+#include <string.h>\n+#include <stdint.h>\n+#include <cutils/compiler.h>\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+/**\n+ * Tag hierarchy and enum definitions for camera_metadata_entry\n+ * =============================================================================\n+ */\n+\n+/**\n+ * Main enum definitions are in a separate file to make it easy to\n+ * maintain\n+ */\n+#include \"camera_metadata_tags.h\"\n+\n+/**\n+ * Enum range for each 
top-level category\n+ */\n+ANDROID_API\n+extern unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2];\n+ANDROID_API\n+extern const char *camera_metadata_section_names[ANDROID_SECTION_COUNT];\n+\n+/**\n+ * Type definitions for camera_metadata_entry\n+ * =============================================================================\n+ */\n+enum {\n+    // Unsigned 8-bit integer (uint8_t)\n+    TYPE_BYTE = 0,\n+    // Signed 32-bit integer (int32_t)\n+    TYPE_INT32 = 1,\n+    // 32-bit float (float)\n+    TYPE_FLOAT = 2,\n+    // Signed 64-bit integer (int64_t)\n+    TYPE_INT64 = 3,\n+    // 64-bit float (double)\n+    TYPE_DOUBLE = 4,\n+    // A 64-bit fraction (camera_metadata_rational_t)\n+    TYPE_RATIONAL = 5,\n+    // Number of type fields\n+    NUM_TYPES\n+};\n+\n+typedef struct camera_metadata_rational {\n+    int32_t numerator;\n+    int32_t denominator;\n+} camera_metadata_rational_t;\n+\n+/**\n+ * A reference to a metadata entry in a buffer.\n+ *\n+ * The data union pointers point to the real data in the buffer, and can be\n+ * modified in-place if the count does not need to change. The count is the\n+ * number of entries in data of the entry's type, not a count of bytes.\n+ */\n+typedef struct camera_metadata_entry {\n+    size_t   index;\n+    uint32_t tag;\n+    uint8_t  type;\n+    size_t   count;\n+    union {\n+        uint8_t *u8;\n+        int32_t *i32;\n+        float   *f;\n+        int64_t *i64;\n+        double  *d;\n+        camera_metadata_rational_t *r;\n+    } data;\n+} camera_metadata_entry_t;\n+\n+/**\n+ * A read-only reference to a metadata entry in a buffer. 
Identical to\n+ * camera_metadata_entry in layout\n+ */\n+typedef struct camera_metadata_ro_entry {\n+    size_t   index;\n+    uint32_t tag;\n+    uint8_t  type;\n+    size_t   count;\n+    union {\n+        const uint8_t *u8;\n+        const int32_t *i32;\n+        const float   *f;\n+        const int64_t *i64;\n+        const double  *d;\n+        const camera_metadata_rational_t *r;\n+    } data;\n+} camera_metadata_ro_entry_t;\n+\n+/**\n+ * Size in bytes of each entry type\n+ */\n+ANDROID_API\n+extern const size_t camera_metadata_type_size[NUM_TYPES];\n+\n+/**\n+ * Human-readable name of each entry type\n+ */\n+ANDROID_API\n+extern const char* camera_metadata_type_names[NUM_TYPES];\n+\n+/**\n+ * Main definitions for the metadata entry and array structures\n+ * =============================================================================\n+ */\n+\n+/**\n+ * A packet of metadata. This is a list of metadata entries, each of which has\n+ * an integer tag to identify its meaning, 'type' and 'count' field, and the\n+ * data, which contains a 'count' number of entries of type 'type'. The packet\n+ * has a fixed capacity for entries and for extra data.  A new entry uses up one\n+ * entry slot, and possibly some amount of data capacity; the function\n+ * calculate_camera_metadata_entry_data_size() provides the amount of data\n+ * capacity that would be used up by an entry.\n+ *\n+ * Entries are not sorted by default, and are not forced to be unique - multiple\n+ * entries with the same tag are allowed. The packet will not dynamically resize\n+ * when full.\n+ *\n+ * The packet is contiguous in memory, with size in bytes given by\n+ * get_camera_metadata_size(). Therefore, it can be copied safely with memcpy()\n+ * to a buffer of sufficient size. 
The copy_camera_metadata() function is\n+ * intended for eliminating unused capacity in the destination packet.\n+ */\n+struct camera_metadata;\n+typedef struct camera_metadata camera_metadata_t;\n+\n+/**\n+ * Functions for manipulating camera metadata\n+ * =============================================================================\n+ *\n+ * NOTE: Unless otherwise specified, functions that return type \"int\"\n+ * return 0 on success, and non-0 value on error.\n+ */\n+\n+/**\n+ * Allocate a new camera_metadata structure, with some initial space for entries\n+ * and extra data. The entry_capacity is measured in entry counts, and\n+ * data_capacity in bytes. The resulting structure is all contiguous in memory,\n+ * and can be freed with free_camera_metadata().\n+ */\n+ANDROID_API\n+camera_metadata_t *allocate_camera_metadata(size_t entry_capacity,\n+        size_t data_capacity);\n+\n+/**\n+ * Get the required alignment of a packet of camera metadata, which is the\n+ * maximal alignment of the embedded camera_metadata, camera_metadata_buffer_entry,\n+ * and camera_metadata_data.\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_alignment();\n+\n+/**\n+ * Allocate a new camera_metadata structure of size src_size. Copy the data,\n+ * ignoring alignment, and then attempt validation. If validation\n+ * fails, free the memory and return NULL. Otherwise return the pointer.\n+ *\n+ * The resulting pointer can be freed with free_camera_metadata().\n+ */\n+ANDROID_API\n+camera_metadata_t *allocate_copy_camera_metadata_checked(\n+        const camera_metadata_t *src,\n+        size_t src_size);\n+\n+/**\n+ * Place a camera metadata structure into an existing buffer. Returns NULL if\n+ * the buffer is too small for the requested number of reserved entries and\n+ * bytes of data. The entry_capacity is measured in entry counts, and\n+ * data_capacity in bytes. If the buffer is larger than the required space,\n+ * unused space will be left at the end. 
If successful, returns a pointer to the\n+ * metadata header placed at the start of the buffer. It is the caller's\n+ * responsibility to free the original buffer; do not call\n+ * free_camera_metadata() with the returned pointer.\n+ */\n+ANDROID_API\n+camera_metadata_t *place_camera_metadata(void *dst, size_t dst_size,\n+        size_t entry_capacity,\n+        size_t data_capacity);\n+\n+/**\n+ * Free a camera_metadata structure. Should only be used with structures\n+ * allocated with allocate_camera_metadata().\n+ */\n+ANDROID_API\n+void free_camera_metadata(camera_metadata_t *metadata);\n+\n+/**\n+ * Calculate the buffer size needed for a metadata structure of entry_count\n+ * metadata entries, needing a total of data_count bytes of extra data storage.\n+ */\n+ANDROID_API\n+size_t calculate_camera_metadata_size(size_t entry_count,\n+        size_t data_count);\n+\n+/**\n+ * Get current size of entire metadata structure in bytes, including reserved\n+ * but unused space.\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_size(const camera_metadata_t *metadata);\n+\n+/**\n+ * Get size of entire metadata buffer in bytes, not including reserved but\n+ * unused space. 
This is the amount of space needed by copy_camera_metadata for\n+ * its dst buffer.\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_compact_size(const camera_metadata_t *metadata);\n+\n+/**\n+ * Get the current number of entries in the metadata packet.\n+ *\n+ * metadata packet must be valid, which can be checked before the call with\n+ * validate_camera_metadata_structure().\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_entry_count(const camera_metadata_t *metadata);\n+\n+/**\n+ * Get the maximum number of entries that could fit in the metadata packet.\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_entry_capacity(const camera_metadata_t *metadata);\n+\n+/**\n+ * Get the current count of bytes used for value storage in the metadata packet.\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_data_count(const camera_metadata_t *metadata);\n+\n+/**\n+ * Get the maximum count of bytes that could be used for value storage in the\n+ * metadata packet.\n+ */\n+ANDROID_API\n+size_t get_camera_metadata_data_capacity(const camera_metadata_t *metadata);\n+\n+/**\n+ * Copy a metadata structure to a memory buffer, compacting it along the\n+ * way. That is, in the copied structure, entry_count == entry_capacity, and\n+ * data_count == data_capacity.\n+ *\n+ * If dst_size > get_camera_metadata_compact_size(), the unused bytes are at the\n+ * end of the buffer. If dst_size < get_camera_metadata_compact_size(), returns\n+ * NULL. 
Otherwise returns a pointer to the metadata structure header placed at\n+ * the start of dst.\n+ *\n+ * Since the buffer was not allocated by allocate_camera_metadata, the caller is\n+ * responsible for freeing the underlying buffer when needed; do not call\n+ * free_camera_metadata.\n+ */\n+ANDROID_API\n+camera_metadata_t *copy_camera_metadata(void *dst, size_t dst_size,\n+        const camera_metadata_t *src);\n+\n+\n+// Non-zero return values for validate_camera_metadata_structure\n+enum {\n+    CAMERA_METADATA_VALIDATION_ERROR = 1,\n+    CAMERA_METADATA_VALIDATION_SHIFTED = 2,\n+};\n+\n+/**\n+ * Validate that a metadata is structurally sane. That is, its internal\n+ * state is such that we won't get buffer overflows or run into other\n+ * 'impossible' issues when calling the other API functions.\n+ *\n+ * This is useful in particular after copying the binary metadata blob\n+ * from an untrusted source, since passing this check means the data is at least\n+ * consistent.\n+ *\n+ * The expected_size argument is optional.\n+ *\n+ * Returns 0: on success\n+ *         CAMERA_METADATA_VALIDATION_ERROR: on error\n+ *         CAMERA_METADATA_VALIDATION_SHIFTED: when the data is not properly aligned, but can be\n+ *                 used as input of clone_camera_metadata and the returned metadata will be valid.\n+ *\n+ */\n+ANDROID_API\n+int validate_camera_metadata_structure(const camera_metadata_t *metadata,\n+                                       const size_t *expected_size);\n+\n+/**\n+ * Append camera metadata in src to an existing metadata structure in dst.  This\n+ * does not resize the destination structure, so if it is too small, a non-zero\n+ * value is returned. On success, 0 is returned. Appending onto a sorted\n+ * structure results in a non-sorted combined structure.\n+ */\n+ANDROID_API\n+int append_camera_metadata(camera_metadata_t *dst, const camera_metadata_t *src);\n+\n+/**\n+ * Clone an existing metadata buffer, compacting along the way. 
This is\n+ * equivalent to allocating a new buffer of the minimum needed size, then\n+ * appending the buffer to be cloned into the new buffer. The resulting buffer\n+ * can be freed with free_camera_metadata(). Returns NULL if cloning failed.\n+ */\n+ANDROID_API\n+camera_metadata_t *clone_camera_metadata(const camera_metadata_t *src);\n+\n+/**\n+ * Calculate the number of bytes of extra data a given metadata entry will take\n+ * up. That is, if entry of 'type' with a payload of 'data_count' values is\n+ * added, how much will the value returned by get_camera_metadata_data_count()\n+ * be increased? This value may be zero, if no extra data storage is needed.\n+ */\n+ANDROID_API\n+size_t calculate_camera_metadata_entry_data_size(uint8_t type,\n+        size_t data_count);\n+\n+/**\n+ * Add a metadata entry to a metadata structure. Returns 0 if the addition\n+ * succeeded. Returns a non-zero value if there is insufficient reserved space\n+ * left to add the entry, or if the tag is unknown.  data_count is the number of\n+ * entries in the data array of the tag's type, not a count of\n+ * bytes. Vendor-defined tags can not be added using this method, unless\n+ * set_vendor_tag_query_ops() has been called first. Entries are always added to\n+ * the end of the structure (highest index), so after addition, a\n+ * previously-sorted array will be marked as unsorted.\n+ *\n+ * Returns 0 on success. A non-0 value is returned on error.\n+ */\n+ANDROID_API\n+int add_camera_metadata_entry(camera_metadata_t *dst,\n+        uint32_t tag,\n+        const void *data,\n+        size_t data_count);\n+\n+/**\n+ * Sort the metadata buffer for fast searching. If already marked as sorted,\n+ * does nothing. Adding or appending entries to the buffer will place the buffer\n+ * back into an unsorted state.\n+ *\n+ * Returns 0 on success. 
A non-0 value is returned on error.\n+ */\n+ANDROID_API\n+int sort_camera_metadata(camera_metadata_t *dst);\n+\n+/**\n+ * Get metadata entry at position index in the metadata buffer.\n+ * Index must be less than entry count, which is returned by\n+ * get_camera_metadata_entry_count().\n+ *\n+ * src and index are inputs; the passed-in entry is updated with the details of\n+ * the entry. The data pointer points to the real data in the buffer, and can be\n+ * updated as long as the data count does not change.\n+ *\n+ * Returns 0 on success. A non-0 value is returned on error.\n+ */\n+ANDROID_API\n+int get_camera_metadata_entry(camera_metadata_t *src,\n+        size_t index,\n+        camera_metadata_entry_t *entry);\n+\n+/**\n+ * Get metadata entry at position index, but disallow editing the data.\n+ */\n+ANDROID_API\n+int get_camera_metadata_ro_entry(const camera_metadata_t *src,\n+        size_t index,\n+        camera_metadata_ro_entry_t *entry);\n+\n+/**\n+ * Find an entry with given tag value. If not found, returns -ENOENT. Otherwise,\n+ * returns entry contents like get_camera_metadata_entry.\n+ *\n+ * If multiple entries with the same tag exist, does not have any guarantees on\n+ * which is returned. To speed up searching for tags, sort the metadata\n+ * structure first by calling sort_camera_metadata().\n+ */\n+ANDROID_API\n+int find_camera_metadata_entry(camera_metadata_t *src,\n+        uint32_t tag,\n+        camera_metadata_entry_t *entry);\n+\n+/**\n+ * Find an entry with given tag value, but disallow editing the data\n+ */\n+ANDROID_API\n+int find_camera_metadata_ro_entry(const camera_metadata_t *src,\n+        uint32_t tag,\n+        camera_metadata_ro_entry_t *entry);\n+\n+/**\n+ * Delete an entry at given index. This is an expensive operation, since it\n+ * requires repacking entries and possibly entry data. This also invalidates any\n+ * existing camera_metadata_entry.data pointers to this buffer. 
Sorting is\n+ * maintained.\n+ */\n+ANDROID_API\n+int delete_camera_metadata_entry(camera_metadata_t *dst,\n+        size_t index);\n+\n+/**\n+ * Updates a metadata entry with new data. If the data size is changing, may\n+ * need to adjust the data array, making this an O(N) operation. If the data\n+ * size is the same or still fits in the entry space, this is O(1). Maintains\n+ * sorting, but invalidates camera_metadata_entry instances that point to the\n+ * updated entry. If a non-NULL value is passed in to entry, the entry structure\n+ * is updated to match the new buffer state.  Returns a non-zero value if there\n+ * is no room for the new data in the buffer.\n+ */\n+ANDROID_API\n+int update_camera_metadata_entry(camera_metadata_t *dst,\n+        size_t index,\n+        const void *data,\n+        size_t data_count,\n+        camera_metadata_entry_t *updated_entry);\n+\n+/**\n+ * Retrieve human-readable name of section the tag is in. Returns NULL if\n+ * no such tag is defined. Returns NULL for tags in the vendor section, unless\n+ * set_vendor_tag_query_ops() has been used.\n+ */\n+ANDROID_API\n+const char *get_camera_metadata_section_name(uint32_t tag);\n+\n+/**\n+ * Retrieve human-readable name of tag (not including section). Returns NULL if\n+ * no such tag is defined. Returns NULL for tags in the vendor section, unless\n+ * set_vendor_tag_query_ops() has been used.\n+ */\n+ANDROID_API\n+const char *get_camera_metadata_tag_name(uint32_t tag);\n+\n+/**\n+ * Retrieve the type of a tag. Returns -1 if no such tag is defined. Returns -1\n+ * for tags in the vendor section, unless set_vendor_tag_query_ops() has been\n+ * used.\n+ */\n+ANDROID_API\n+int get_camera_metadata_tag_type(uint32_t tag);\n+\n+/**\n+ * Retrieve human-readable name of section the tag is in. 
Returns NULL if\n+ * no such tag is defined.\n+ */\n+ANDROID_API\n+const char *get_local_camera_metadata_section_name(uint32_t tag,\n+        const camera_metadata_t *meta);\n+\n+/**\n+ * Retrieve human-readable name of tag (not including section). Returns NULL if\n+ * no such tag is defined.\n+ */\n+ANDROID_API\n+const char *get_local_camera_metadata_tag_name(uint32_t tag,\n+        const camera_metadata_t *meta);\n+\n+/**\n+ * Retrieve the type of a tag. Returns -1 if no such tag is defined.\n+ */\n+ANDROID_API\n+int get_local_camera_metadata_tag_type(uint32_t tag,\n+        const camera_metadata_t *meta);\n+\n+/**\n+ * Set up vendor-specific tag query methods. These are needed to properly add\n+ * entries with vendor-specified tags and to use the\n+ * get_camera_metadata_section_name, _tag_name, and _tag_type methods with\n+ * vendor tags. Returns 0 on success.\n+ *\n+ * **DEPRECATED** - Please use vendor_tag_ops defined in camera_vendor_tags.h\n+ *        instead.\n+ */\n+typedef struct vendor_tag_query_ops vendor_tag_query_ops_t;\n+struct vendor_tag_query_ops {\n+    /**\n+     * Get vendor section name for a vendor-specified entry tag. Only called for\n+     * tags >= 0x80000000. The section name must start with the name of the\n+     * vendor in the Java package style. For example, CameraZoom inc must prefix\n+     * their sections with \"com.camerazoom.\" Must return NULL if the tag is\n+     * outside the bounds of vendor-defined sections.\n+     */\n+    const char *(*get_camera_vendor_section_name)(\n+        const vendor_tag_query_ops_t *v,\n+        uint32_t tag);\n+    /**\n+     * Get tag name for a vendor-specified entry tag. Only called for tags >=\n+     * 0x80000000. Must return NULL if the tag is outside the bounds of\n+     * vendor-defined sections.\n+     */\n+    const char *(*get_camera_vendor_tag_name)(\n+        const vendor_tag_query_ops_t *v,\n+        uint32_t tag);\n+    /**\n+     * Get tag type for a vendor-specified entry tag. 
Only called for tags >=\n+     * 0x80000000. Must return -1 if the tag is outside the bounds of\n+     * vendor-defined sections.\n+     */\n+    int (*get_camera_vendor_tag_type)(\n+        const vendor_tag_query_ops_t *v,\n+        uint32_t tag);\n+    /**\n+     * Get the number of vendor tags supported on this platform. Used to\n+     * calculate the size of buffer needed for holding the array of all tags\n+     * returned by get_camera_vendor_tags().\n+     */\n+    int (*get_camera_vendor_tag_count)(\n+        const vendor_tag_query_ops_t *v);\n+    /**\n+     * Fill an array with all the supported vendor tags on this platform.\n+     * get_camera_vendor_tag_count() returns the number of tags supported, and\n+     * tag_array should be allocated with enough space to hold all of the tags.\n+     */\n+    void (*get_camera_vendor_tags)(\n+        const vendor_tag_query_ops_t *v,\n+        uint32_t *tag_array);\n+};\n+\n+/**\n+ * **DEPRECATED** - This should only be used by the camera framework. Camera\n+ *      metadata will transition to using vendor_tag_ops defined in\n+ *      camera_vendor_tags.h instead.\n+ */\n+ANDROID_API\n+int set_camera_metadata_vendor_tag_ops(const vendor_tag_query_ops_t *query_ops);\n+\n+/**\n+ * Print fields in the metadata to the log.\n+ * verbosity = 0: Only tag entry information\n+ * verbosity = 1: Tag entry information plus at most 16 data values\n+ * verbosity = 2: All information\n+ */\n+ANDROID_API\n+void dump_camera_metadata(const camera_metadata_t *metadata,\n+        int fd,\n+        int verbosity);\n+\n+/**\n+ * Print fields in the metadata to the log; adds indentation parameter, which\n+ * specifies the number of spaces to insert before each line of the dump\n+ */\n+ANDROID_API\n+void dump_indented_camera_metadata(const camera_metadata_t *metadata,\n+        int fd,\n+        int verbosity,\n+        int indentation);\n+\n+/**\n+ * Prints the specified tag value as a string. 
Only works for enum tags.\n+ * Returns 0 on success, -1 on failure.\n+ */\n+ANDROID_API\n+int camera_metadata_enum_snprint(uint32_t tag,\n+                                 uint32_t value,\n+                                 char *dst,\n+                                 size_t size);\n+\n+#ifdef __cplusplus\n+}\n+#endif\n+\n+#endif\ndiff --git a/include/android/metadata/system/camera_metadata_tags.h b/include/android/metadata/system/camera_metadata_tags.h\nnew file mode 100644\nindex 000000000000..adf18b8f4c2f\n--- /dev/null\n+++ b/include/android/metadata/system/camera_metadata_tags.h\n@@ -0,0 +1,1005 @@\n+/*\n+ * Copyright (C) 2012 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+/**\n+ * !! Do not include this file directly !!\n+ *\n+ * Include camera_metadata.h instead.\n+ */\n+\n+/**\n+ * ! Do not edit this file directly !\n+ *\n+ * Generated automatically from camera_metadata_tags.mako\n+ */\n+\n+/** TODO: Nearly every enum in this file needs a description */\n+\n+/**\n+ * Top level hierarchy definitions for camera metadata. 
*_INFO sections are for\n+ * the static metadata that can be retrived without opening the camera device.\n+ * New sections must be added right before ANDROID_SECTION_COUNT to maintain\n+ * existing enumerations.\n+ */\n+typedef enum camera_metadata_section {\n+    ANDROID_COLOR_CORRECTION,\n+    ANDROID_CONTROL,\n+    ANDROID_DEMOSAIC,\n+    ANDROID_EDGE,\n+    ANDROID_FLASH,\n+    ANDROID_FLASH_INFO,\n+    ANDROID_HOT_PIXEL,\n+    ANDROID_JPEG,\n+    ANDROID_LENS,\n+    ANDROID_LENS_INFO,\n+    ANDROID_NOISE_REDUCTION,\n+    ANDROID_QUIRKS,\n+    ANDROID_REQUEST,\n+    ANDROID_SCALER,\n+    ANDROID_SENSOR,\n+    ANDROID_SENSOR_INFO,\n+    ANDROID_SHADING,\n+    ANDROID_STATISTICS,\n+    ANDROID_STATISTICS_INFO,\n+    ANDROID_TONEMAP,\n+    ANDROID_LED,\n+    ANDROID_INFO,\n+    ANDROID_BLACK_LEVEL,\n+    ANDROID_SYNC,\n+    ANDROID_REPROCESS,\n+    ANDROID_DEPTH,\n+    ANDROID_LOGICAL_MULTI_CAMERA,\n+    ANDROID_DISTORTION_CORRECTION,\n+    ANDROID_SECTION_COUNT,\n+\n+    VENDOR_SECTION = 0x8000\n+} camera_metadata_section_t;\n+\n+/**\n+ * Hierarchy positions in enum space. 
All vendor extension tags must be\n+ * defined with tag >= VENDOR_SECTION_START\n+ */\n+typedef enum camera_metadata_section_start {\n+    ANDROID_COLOR_CORRECTION_START = ANDROID_COLOR_CORRECTION  << 16,\n+    ANDROID_CONTROL_START          = ANDROID_CONTROL           << 16,\n+    ANDROID_DEMOSAIC_START         = ANDROID_DEMOSAIC          << 16,\n+    ANDROID_EDGE_START             = ANDROID_EDGE              << 16,\n+    ANDROID_FLASH_START            = ANDROID_FLASH             << 16,\n+    ANDROID_FLASH_INFO_START       = ANDROID_FLASH_INFO        << 16,\n+    ANDROID_HOT_PIXEL_START        = ANDROID_HOT_PIXEL         << 16,\n+    ANDROID_JPEG_START             = ANDROID_JPEG              << 16,\n+    ANDROID_LENS_START             = ANDROID_LENS              << 16,\n+    ANDROID_LENS_INFO_START        = ANDROID_LENS_INFO         << 16,\n+    ANDROID_NOISE_REDUCTION_START  = ANDROID_NOISE_REDUCTION   << 16,\n+    ANDROID_QUIRKS_START           = ANDROID_QUIRKS            << 16,\n+    ANDROID_REQUEST_START          = ANDROID_REQUEST           << 16,\n+    ANDROID_SCALER_START           = ANDROID_SCALER            << 16,\n+    ANDROID_SENSOR_START           = ANDROID_SENSOR            << 16,\n+    ANDROID_SENSOR_INFO_START      = ANDROID_SENSOR_INFO       << 16,\n+    ANDROID_SHADING_START          = ANDROID_SHADING           << 16,\n+    ANDROID_STATISTICS_START       = ANDROID_STATISTICS        << 16,\n+    ANDROID_STATISTICS_INFO_START  = ANDROID_STATISTICS_INFO   << 16,\n+    ANDROID_TONEMAP_START          = ANDROID_TONEMAP           << 16,\n+    ANDROID_LED_START              = ANDROID_LED               << 16,\n+    ANDROID_INFO_START             = ANDROID_INFO              << 16,\n+    ANDROID_BLACK_LEVEL_START      = ANDROID_BLACK_LEVEL       << 16,\n+    ANDROID_SYNC_START             = ANDROID_SYNC              << 16,\n+    ANDROID_REPROCESS_START        = ANDROID_REPROCESS         << 16,\n+    ANDROID_DEPTH_START            = ANDROID_DEPTH             
<< 16,\n+    ANDROID_LOGICAL_MULTI_CAMERA_START\n+                                   = ANDROID_LOGICAL_MULTI_CAMERA\n+                                                                << 16,\n+    ANDROID_DISTORTION_CORRECTION_START\n+                                   = ANDROID_DISTORTION_CORRECTION\n+                                                                << 16,\n+    VENDOR_SECTION_START           = VENDOR_SECTION            << 16\n+} camera_metadata_section_start_t;\n+\n+/**\n+ * Main enum for defining camera metadata tags.  New entries must always go\n+ * before the section _END tag to preserve existing enumeration values.  In\n+ * addition, the name and type of the tag needs to be added to\n+ * system/media/camera/src/camera_metadata_tag_info.c\n+ */\n+typedef enum camera_metadata_tag {\n+    ANDROID_COLOR_CORRECTION_MODE =                   // enum         | public       | HIDL v3.2\n+            ANDROID_COLOR_CORRECTION_START,\n+    ANDROID_COLOR_CORRECTION_TRANSFORM,               // rational[]   | public       | HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_GAINS,                   // float[]      | public       | HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE,         // enum         | public       | HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,\n+                                                      // byte[]       | public       | HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_END,\n+\n+    ANDROID_CONTROL_AE_ANTIBANDING_MODE =             // enum         | public       | HIDL v3.2\n+            ANDROID_CONTROL_START,\n+    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,         // int32        | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_LOCK,                          // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_MODE,                          // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_REGIONS,                       // int32[]      | public       | HIDL v3.2\n+    
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,              // int32[]      | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,            // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_MODE,                          // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_REGIONS,                       // int32[]      | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_TRIGGER,                       // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AWB_LOCK,                         // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE,                         // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AWB_REGIONS,                      // int32[]      | public       | HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT,                   // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE,                      // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_MODE,                             // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE,                       // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,         // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,   // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_AVAILABLE_MODES,               // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,   // int32[]      | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_COMPENSATION_RANGE,            // int32[]      | public       | HIDL v3.2\n+    ANDROID_CONTROL_AE_COMPENSATION_STEP,             // rational     | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_AVAILABLE_MODES,               // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_AVAILABLE_EFFECTS,                // byte[]       | public       | HIDL v3.2\n+    
ANDROID_CONTROL_AVAILABLE_SCENE_MODES,            // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,\n+                                                      // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_AWB_AVAILABLE_MODES,              // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_MAX_REGIONS,                      // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_OVERRIDES,             // byte[]       | system       | HIDL v3.2\n+    ANDROID_CONTROL_AE_PRECAPTURE_ID,                 // int32        | system       | HIDL v3.2\n+    ANDROID_CONTROL_AE_STATE,                         // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE,                         // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_TRIGGER_ID,                    // int32        | system       | HIDL v3.2\n+    ANDROID_CONTROL_AWB_STATE,                        // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,\n+                                                      // int32[]      | hidden       | HIDL v3.2\n+    ANDROID_CONTROL_AE_LOCK_AVAILABLE,                // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE,               // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AVAILABLE_MODES,                  // byte[]       | public       | HIDL v3.2\n+    ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, // int32[]      | public       | HIDL v3.2\n+    ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,       // int32        | public       | HIDL v3.2\n+    ANDROID_CONTROL_ENABLE_ZSL,                       // enum         | public       | HIDL v3.2\n+    ANDROID_CONTROL_AF_SCENE_CHANGE,                  // enum         | public       | HIDL v3.3\n+    ANDROID_CONTROL_END,\n+\n+    ANDROID_DEMOSAIC_MODE =                           // enum       
  | system       | HIDL v3.2\n+            ANDROID_DEMOSAIC_START,\n+    ANDROID_DEMOSAIC_END,\n+\n+    ANDROID_EDGE_MODE =                               // enum         | public       | HIDL v3.2\n+            ANDROID_EDGE_START,\n+    ANDROID_EDGE_STRENGTH,                            // byte         | system       | HIDL v3.2\n+    ANDROID_EDGE_AVAILABLE_EDGE_MODES,                // byte[]       | public       | HIDL v3.2\n+    ANDROID_EDGE_END,\n+\n+    ANDROID_FLASH_FIRING_POWER =                      // byte         | system       | HIDL v3.2\n+            ANDROID_FLASH_START,\n+    ANDROID_FLASH_FIRING_TIME,                        // int64        | system       | HIDL v3.2\n+    ANDROID_FLASH_MODE,                               // enum         | public       | HIDL v3.2\n+    ANDROID_FLASH_COLOR_TEMPERATURE,                  // byte         | system       | HIDL v3.2\n+    ANDROID_FLASH_MAX_ENERGY,                         // byte         | system       | HIDL v3.2\n+    ANDROID_FLASH_STATE,                              // enum         | public       | HIDL v3.2\n+    ANDROID_FLASH_END,\n+\n+    ANDROID_FLASH_INFO_AVAILABLE =                    // enum         | public       | HIDL v3.2\n+            ANDROID_FLASH_INFO_START,\n+    ANDROID_FLASH_INFO_CHARGE_DURATION,               // int64        | system       | HIDL v3.2\n+    ANDROID_FLASH_INFO_END,\n+\n+    ANDROID_HOT_PIXEL_MODE =                          // enum         | public       | HIDL v3.2\n+            ANDROID_HOT_PIXEL_START,\n+    ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,      // byte[]       | public       | HIDL v3.2\n+    ANDROID_HOT_PIXEL_END,\n+\n+    ANDROID_JPEG_GPS_COORDINATES =                    // double[]     | ndk_public   | HIDL v3.2\n+            ANDROID_JPEG_START,\n+    ANDROID_JPEG_GPS_PROCESSING_METHOD,               // byte         | ndk_public   | HIDL v3.2\n+    ANDROID_JPEG_GPS_TIMESTAMP,                       // int64        | ndk_public   | HIDL v3.2\n+    
ANDROID_JPEG_ORIENTATION,                         // int32        | public       | HIDL v3.2\n+    ANDROID_JPEG_QUALITY,                             // byte         | public       | HIDL v3.2\n+    ANDROID_JPEG_THUMBNAIL_QUALITY,                   // byte         | public       | HIDL v3.2\n+    ANDROID_JPEG_THUMBNAIL_SIZE,                      // int32[]      | public       | HIDL v3.2\n+    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,           // int32[]      | public       | HIDL v3.2\n+    ANDROID_JPEG_MAX_SIZE,                            // int32        | system       | HIDL v3.2\n+    ANDROID_JPEG_SIZE,                                // int32        | system       | HIDL v3.2\n+    ANDROID_JPEG_END,\n+\n+    ANDROID_LENS_APERTURE =                           // float        | public       | HIDL v3.2\n+            ANDROID_LENS_START,\n+    ANDROID_LENS_FILTER_DENSITY,                      // float        | public       | HIDL v3.2\n+    ANDROID_LENS_FOCAL_LENGTH,                        // float        | public       | HIDL v3.2\n+    ANDROID_LENS_FOCUS_DISTANCE,                      // float        | public       | HIDL v3.2\n+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE,          // enum         | public       | HIDL v3.2\n+    ANDROID_LENS_FACING,                              // enum         | public       | HIDL v3.2\n+    ANDROID_LENS_POSE_ROTATION,                       // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_POSE_TRANSLATION,                    // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_FOCUS_RANGE,                         // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_STATE,                               // enum         | public       | HIDL v3.2\n+    ANDROID_LENS_INTRINSIC_CALIBRATION,               // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_RADIAL_DISTORTION,                   // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_POSE_REFERENCE,                      // enum 
        | public       | HIDL v3.3\n+    ANDROID_LENS_DISTORTION,                          // float[]      | public       | HIDL v3.3\n+    ANDROID_LENS_END,\n+\n+    ANDROID_LENS_INFO_AVAILABLE_APERTURES =           // float[]      | public       | HIDL v3.2\n+            ANDROID_LENS_INFO_START,\n+    ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,     // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,        // float[]      | public       | HIDL v3.2\n+    ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,// byte[]       | public       | HIDL v3.2\n+    ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,            // float        | public       | HIDL v3.2\n+    ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,         // float        | public       | HIDL v3.2\n+    ANDROID_LENS_INFO_SHADING_MAP_SIZE,               // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,     // enum         | public       | HIDL v3.2\n+    ANDROID_LENS_INFO_END,\n+\n+    ANDROID_NOISE_REDUCTION_MODE =                    // enum         | public       | HIDL v3.2\n+            ANDROID_NOISE_REDUCTION_START,\n+    ANDROID_NOISE_REDUCTION_STRENGTH,                 // byte         | system       | HIDL v3.2\n+    ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,\n+                                                      // byte[]       | public       | HIDL v3.2\n+    ANDROID_NOISE_REDUCTION_END,\n+\n+    ANDROID_QUIRKS_METERING_CROP_REGION =             // byte         | system       | HIDL v3.2\n+            ANDROID_QUIRKS_START,\n+    ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO,              // byte         | system       | HIDL v3.2\n+    ANDROID_QUIRKS_USE_ZSL_FORMAT,                    // byte         | system       | HIDL v3.2\n+    ANDROID_QUIRKS_USE_PARTIAL_RESULT,                // byte         | hidden       | HIDL v3.2\n+    ANDROID_QUIRKS_PARTIAL_RESULT,                    // enum         | hidden       | HIDL 
v3.2\n+    ANDROID_QUIRKS_END,\n+\n+    ANDROID_REQUEST_FRAME_COUNT =                     // int32        | hidden       | HIDL v3.2\n+            ANDROID_REQUEST_START,\n+    ANDROID_REQUEST_ID,                               // int32        | hidden       | HIDL v3.2\n+    ANDROID_REQUEST_INPUT_STREAMS,                    // int32[]      | system       | HIDL v3.2\n+    ANDROID_REQUEST_METADATA_MODE,                    // enum         | system       | HIDL v3.2\n+    ANDROID_REQUEST_OUTPUT_STREAMS,                   // int32[]      | system       | HIDL v3.2\n+    ANDROID_REQUEST_TYPE,                             // enum         | system       | HIDL v3.2\n+    ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,           // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,        // int32[]      | system       | HIDL v3.2\n+    ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,            // int32        | java_public  | HIDL v3.2\n+    ANDROID_REQUEST_PIPELINE_DEPTH,                   // byte         | public       | HIDL v3.2\n+    ANDROID_REQUEST_PIPELINE_MAX_DEPTH,               // byte         | public       | HIDL v3.2\n+    ANDROID_REQUEST_PARTIAL_RESULT_COUNT,             // int32        | public       | HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES,           // enum[]       | public       | HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,           // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,            // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,   // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,           // int32[]      | ndk_public   | HIDL v3.3\n+    ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,\n+                                                      // int32[]      | hidden       | HIDL v3.3\n+    ANDROID_REQUEST_END,\n+\n+    ANDROID_SCALER_CROP_REGION =                     
 // int32[]      | public       | HIDL v3.2\n+            ANDROID_SCALER_START,\n+    ANDROID_SCALER_AVAILABLE_FORMATS,                 // enum[]       | hidden       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,      // int64[]      | hidden       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,              // int32[]      | hidden       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,        // float        | public       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, // int64[]      | hidden       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,         // int32[]      | hidden       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,       // int64[]      | system       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_RAW_SIZES,               // int32[]      | system       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,// int32        | hidden       | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,   // enum[]       | ndk_public   | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,     // int64[]      | ndk_public   | HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,         // int64[]      | ndk_public   | HIDL v3.2\n+    ANDROID_SCALER_CROPPING_TYPE,                     // enum         | public       | HIDL v3.2\n+    ANDROID_SCALER_END,\n+\n+    ANDROID_SENSOR_EXPOSURE_TIME =                    // int64        | public       | HIDL v3.2\n+            ANDROID_SENSOR_START,\n+    ANDROID_SENSOR_FRAME_DURATION,                    // int64        | public       | HIDL v3.2\n+    ANDROID_SENSOR_SENSITIVITY,                       // int32        | public       | HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1,             // enum         | public       | HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT2,             // byte         | public       | HIDL v3.2\n+    ANDROID_SENSOR_CALIBRATION_TRANSFORM1,            // rational[]   
| public       | HIDL v3.2\n+    ANDROID_SENSOR_CALIBRATION_TRANSFORM2,            // rational[]   | public       | HIDL v3.2\n+    ANDROID_SENSOR_COLOR_TRANSFORM1,                  // rational[]   | public       | HIDL v3.2\n+    ANDROID_SENSOR_COLOR_TRANSFORM2,                  // rational[]   | public       | HIDL v3.2\n+    ANDROID_SENSOR_FORWARD_MATRIX1,                   // rational[]   | public       | HIDL v3.2\n+    ANDROID_SENSOR_FORWARD_MATRIX2,                   // rational[]   | public       | HIDL v3.2\n+    ANDROID_SENSOR_BASE_GAIN_FACTOR,                  // rational     | system       | HIDL v3.2\n+    ANDROID_SENSOR_BLACK_LEVEL_PATTERN,               // int32[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,            // int32        | public       | HIDL v3.2\n+    ANDROID_SENSOR_ORIENTATION,                       // int32        | public       | HIDL v3.2\n+    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,    // int32[]      | system       | HIDL v3.2\n+    ANDROID_SENSOR_TIMESTAMP,                         // int64        | public       | HIDL v3.2\n+    ANDROID_SENSOR_TEMPERATURE,                       // float        | system       | HIDL v3.2\n+    ANDROID_SENSOR_NEUTRAL_COLOR_POINT,               // rational[]   | public       | HIDL v3.2\n+    ANDROID_SENSOR_NOISE_PROFILE,                     // double[]     | public       | HIDL v3.2\n+    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,               // float[]      | system       | HIDL v3.2\n+    ANDROID_SENSOR_PROFILE_TONE_CURVE,                // float[]      | system       | HIDL v3.2\n+    ANDROID_SENSOR_GREEN_SPLIT,                       // float        | public       | HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_DATA,                 // int32[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_MODE,                 // enum         | public       | HIDL v3.2\n+    ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,      // int32[]      | public       | 
HIDL v3.2\n+    ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,              // int64        | public       | HIDL v3.2\n+    ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,             // int32[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,               // float[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,               // int32        | public       | HIDL v3.2\n+    ANDROID_SENSOR_OPAQUE_RAW_SIZE,                   // int32[]      | system       | HIDL v3.2\n+    ANDROID_SENSOR_END,\n+\n+    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE =           // int32[]      | public       | HIDL v3.2\n+            ANDROID_SENSOR_INFO_START,\n+    ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,            // int32[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,     // enum         | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,          // int64[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,           // int64        | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_PHYSICAL_SIZE,                // float[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,             // int32[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_WHITE_LEVEL,                  // int32        | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,             // enum         | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED,         // enum         | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,\n+                                                      // int32[]      | public       | HIDL v3.2\n+    ANDROID_SENSOR_INFO_END,\n+\n+    ANDROID_SHADING_MODE =                            // enum         | public       | HIDL v3.2\n+            ANDROID_SHADING_START,\n+    ANDROID_SHADING_STRENGTH,                         // byte         | system       | HIDL v3.2\n+    
ANDROID_SHADING_AVAILABLE_MODES,                  // byte[]       | public       | HIDL v3.2\n+    ANDROID_SHADING_END,\n+\n+    ANDROID_STATISTICS_FACE_DETECT_MODE =             // enum         | public       | HIDL v3.2\n+            ANDROID_STATISTICS_START,\n+    ANDROID_STATISTICS_HISTOGRAM_MODE,                // enum         | system       | HIDL v3.2\n+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE,            // enum         | system       | HIDL v3.2\n+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,            // enum         | public       | HIDL v3.2\n+    ANDROID_STATISTICS_FACE_IDS,                      // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_STATISTICS_FACE_LANDMARKS,                // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_STATISTICS_FACE_RECTANGLES,               // int32[]      | ndk_public   | HIDL v3.2\n+    ANDROID_STATISTICS_FACE_SCORES,                   // byte[]       | ndk_public   | HIDL v3.2\n+    ANDROID_STATISTICS_HISTOGRAM,                     // int32[]      | system       | HIDL v3.2\n+    ANDROID_STATISTICS_SHARPNESS_MAP,                 // int32[]      | system       | HIDL v3.2\n+    ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP,   // byte         | java_public  | HIDL v3.2\n+    ANDROID_STATISTICS_LENS_SHADING_MAP,              // float[]      | ndk_public   | HIDL v3.2\n+    ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,         // float[]      | hidden       | HIDL v3.2\n+    ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,     // rational[]   | hidden       | HIDL v3.2\n+    ANDROID_STATISTICS_SCENE_FLICKER,                 // enum         | public       | HIDL v3.2\n+    ANDROID_STATISTICS_HOT_PIXEL_MAP,                 // int32[]      | public       | HIDL v3.2\n+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,         // enum         | public       | HIDL v3.2\n+    ANDROID_STATISTICS_OIS_DATA_MODE,                 // enum         | public       | HIDL v3.3\n+    ANDROID_STATISTICS_OIS_TIMESTAMPS,               
 // int64[]      | ndk_public   | HIDL v3.3\n+    ANDROID_STATISTICS_OIS_X_SHIFTS,                  // float[]      | ndk_public   | HIDL v3.3\n+    ANDROID_STATISTICS_OIS_Y_SHIFTS,                  // float[]      | ndk_public   | HIDL v3.3\n+    ANDROID_STATISTICS_END,\n+\n+    ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = \n+                                                      // byte[]       | public       | HIDL v3.2\n+            ANDROID_STATISTICS_INFO_START,\n+    ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,   // int32        | system       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,           // int32        | public       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,      // int32        | system       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,  // int32        | system       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,       // int32[]      | system       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,\n+                                                      // byte[]       | public       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,\n+                                                      // byte[]       | public       | HIDL v3.2\n+    ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES, // byte[]       | public       | HIDL v3.3\n+    ANDROID_STATISTICS_INFO_END,\n+\n+    ANDROID_TONEMAP_CURVE_BLUE =                      // float[]      | ndk_public   | HIDL v3.2\n+            ANDROID_TONEMAP_START,\n+    ANDROID_TONEMAP_CURVE_GREEN,                      // float[]      | ndk_public   | HIDL v3.2\n+    ANDROID_TONEMAP_CURVE_RED,                        // float[]      | ndk_public   | HIDL v3.2\n+    ANDROID_TONEMAP_MODE,                             // enum         | public       | HIDL v3.2\n+    ANDROID_TONEMAP_MAX_CURVE_POINTS,                 // int32        | public       | HIDL v3.2\n+    
ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,         // byte[]       | public       | HIDL v3.2\n+    ANDROID_TONEMAP_GAMMA,                            // float        | public       | HIDL v3.2\n+    ANDROID_TONEMAP_PRESET_CURVE,                     // enum         | public       | HIDL v3.2\n+    ANDROID_TONEMAP_END,\n+\n+    ANDROID_LED_TRANSMIT =                            // enum         | hidden       | HIDL v3.2\n+            ANDROID_LED_START,\n+    ANDROID_LED_AVAILABLE_LEDS,                       // enum[]       | hidden       | HIDL v3.2\n+    ANDROID_LED_END,\n+\n+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL =           // enum         | public       | HIDL v3.2\n+            ANDROID_INFO_START,\n+    ANDROID_INFO_VERSION,                             // byte         | public       | HIDL v3.3\n+    ANDROID_INFO_END,\n+\n+    ANDROID_BLACK_LEVEL_LOCK =                        // enum         | public       | HIDL v3.2\n+            ANDROID_BLACK_LEVEL_START,\n+    ANDROID_BLACK_LEVEL_END,\n+\n+    ANDROID_SYNC_FRAME_NUMBER =                       // enum         | ndk_public   | HIDL v3.2\n+            ANDROID_SYNC_START,\n+    ANDROID_SYNC_MAX_LATENCY,                         // enum         | public       | HIDL v3.2\n+    ANDROID_SYNC_END,\n+\n+    ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =     // float        | java_public  | HIDL v3.2\n+            ANDROID_REPROCESS_START,\n+    ANDROID_REPROCESS_MAX_CAPTURE_STALL,              // int32        | java_public  | HIDL v3.2\n+    ANDROID_REPROCESS_END,\n+\n+    ANDROID_DEPTH_MAX_DEPTH_SAMPLES =                 // int32        | system       | HIDL v3.2\n+            ANDROID_DEPTH_START,\n+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,\n+                                                      // enum[]       | ndk_public   | HIDL v3.2\n+    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,// int64[]      | ndk_public   | HIDL v3.2\n+    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,    // int64[]    
  | ndk_public   | HIDL v3.2\n+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,                 // enum         | public       | HIDL v3.2\n+    ANDROID_DEPTH_END,\n+\n+    ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS =       // byte[]       | hidden       | HIDL v3.3\n+            ANDROID_LOGICAL_MULTI_CAMERA_START,\n+    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE,    // enum         | public       | HIDL v3.3\n+    ANDROID_LOGICAL_MULTI_CAMERA_END,\n+\n+    ANDROID_DISTORTION_CORRECTION_MODE =              // enum         | public       | HIDL v3.3\n+            ANDROID_DISTORTION_CORRECTION_START,\n+    ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES,    // byte[]       | public       | HIDL v3.3\n+    ANDROID_DISTORTION_CORRECTION_END,\n+\n+} camera_metadata_tag_t;\n+\n+/**\n+ * Enumeration definitions for the various entries that need them\n+ */\n+\n+// ANDROID_COLOR_CORRECTION_MODE\n+typedef enum camera_metadata_enum_android_color_correction_mode {\n+    ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX                  , // HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_MODE_FAST                              , // HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY                      , // HIDL v3.2\n+} camera_metadata_enum_android_color_correction_mode_t;\n+\n+// ANDROID_COLOR_CORRECTION_ABERRATION_MODE\n+typedef enum camera_metadata_enum_android_color_correction_aberration_mode {\n+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF                    , // HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST                   , // HIDL v3.2\n+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY           , // HIDL v3.2\n+} camera_metadata_enum_android_color_correction_aberration_mode_t;\n+\n+\n+// ANDROID_CONTROL_AE_ANTIBANDING_MODE\n+typedef enum camera_metadata_enum_android_control_ae_antibanding_mode {\n+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF                         , // HIDL v3.2\n+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ                        , 
// HIDL v3.2\n+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ                        , // HIDL v3.2\n+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO                        , // HIDL v3.2\n+} camera_metadata_enum_android_control_ae_antibanding_mode_t;\n+\n+// ANDROID_CONTROL_AE_LOCK\n+typedef enum camera_metadata_enum_android_control_ae_lock {\n+    ANDROID_CONTROL_AE_LOCK_OFF                                     , // HIDL v3.2\n+    ANDROID_CONTROL_AE_LOCK_ON                                      , // HIDL v3.2\n+} camera_metadata_enum_android_control_ae_lock_t;\n+\n+// ANDROID_CONTROL_AE_MODE\n+typedef enum camera_metadata_enum_android_control_ae_mode {\n+    ANDROID_CONTROL_AE_MODE_OFF                                     , // HIDL v3.2\n+    ANDROID_CONTROL_AE_MODE_ON                                      , // HIDL v3.2\n+    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH                           , // HIDL v3.2\n+    ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH                         , // HIDL v3.2\n+    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE                    , // HIDL v3.2\n+    ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH                       , // HIDL v3.3\n+} camera_metadata_enum_android_control_ae_mode_t;\n+\n+// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER\n+typedef enum camera_metadata_enum_android_control_ae_precapture_trigger {\n+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE                      , // HIDL v3.2\n+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START                     , // HIDL v3.2\n+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL                    , // HIDL v3.2\n+} camera_metadata_enum_android_control_ae_precapture_trigger_t;\n+\n+// ANDROID_CONTROL_AF_MODE\n+typedef enum camera_metadata_enum_android_control_af_mode {\n+    ANDROID_CONTROL_AF_MODE_OFF                                     , // HIDL v3.2\n+    ANDROID_CONTROL_AF_MODE_AUTO                                    , // HIDL v3.2\n+    ANDROID_CONTROL_AF_MODE_MACRO                                   , 
// HIDL v3.2\n+    ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO                        , // HIDL v3.2\n+    ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE                      , // HIDL v3.2\n+    ANDROID_CONTROL_AF_MODE_EDOF                                    , // HIDL v3.2\n+} camera_metadata_enum_android_control_af_mode_t;\n+\n+// ANDROID_CONTROL_AF_TRIGGER\n+typedef enum camera_metadata_enum_android_control_af_trigger {\n+    ANDROID_CONTROL_AF_TRIGGER_IDLE                                 , // HIDL v3.2\n+    ANDROID_CONTROL_AF_TRIGGER_START                                , // HIDL v3.2\n+    ANDROID_CONTROL_AF_TRIGGER_CANCEL                               , // HIDL v3.2\n+} camera_metadata_enum_android_control_af_trigger_t;\n+\n+// ANDROID_CONTROL_AWB_LOCK\n+typedef enum camera_metadata_enum_android_control_awb_lock {\n+    ANDROID_CONTROL_AWB_LOCK_OFF                                    , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_LOCK_ON                                     , // HIDL v3.2\n+} camera_metadata_enum_android_control_awb_lock_t;\n+\n+// ANDROID_CONTROL_AWB_MODE\n+typedef enum camera_metadata_enum_android_control_awb_mode {\n+    ANDROID_CONTROL_AWB_MODE_OFF                                    , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_AUTO                                   , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_INCANDESCENT                           , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_FLUORESCENT                            , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT                       , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_DAYLIGHT                               , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT                        , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_TWILIGHT                               , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_MODE_SHADE                                  , // HIDL v3.2\n+} camera_metadata_enum_android_control_awb_mode_t;\n+\n+// 
ANDROID_CONTROL_CAPTURE_INTENT\n+typedef enum camera_metadata_enum_android_control_capture_intent {\n+    ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM                           , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW                          , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE                    , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD                     , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT                   , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG                 , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_MANUAL                           , // HIDL v3.2\n+    ANDROID_CONTROL_CAPTURE_INTENT_MOTION_TRACKING                  , // HIDL v3.3\n+} camera_metadata_enum_android_control_capture_intent_t;\n+\n+// ANDROID_CONTROL_EFFECT_MODE\n+typedef enum camera_metadata_enum_android_control_effect_mode {\n+    ANDROID_CONTROL_EFFECT_MODE_OFF                                 , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_MONO                                , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_NEGATIVE                            , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_SOLARIZE                            , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_SEPIA                               , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_POSTERIZE                           , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD                          , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD                          , // HIDL v3.2\n+    ANDROID_CONTROL_EFFECT_MODE_AQUA                                , // HIDL v3.2\n+} camera_metadata_enum_android_control_effect_mode_t;\n+\n+// ANDROID_CONTROL_MODE\n+typedef enum camera_metadata_enum_android_control_mode {\n+    ANDROID_CONTROL_MODE_OFF                                        , // HIDL v3.2\n+    ANDROID_CONTROL_MODE_AUTO                                  
     , // HIDL v3.2\n+    ANDROID_CONTROL_MODE_USE_SCENE_MODE                             , // HIDL v3.2\n+    ANDROID_CONTROL_MODE_OFF_KEEP_STATE                             , // HIDL v3.2\n+} camera_metadata_enum_android_control_mode_t;\n+\n+// ANDROID_CONTROL_SCENE_MODE\n+typedef enum camera_metadata_enum_android_control_scene_mode {\n+    ANDROID_CONTROL_SCENE_MODE_DISABLED                              = 0, // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY                        , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_ACTION                               , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_PORTRAIT                             , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_LANDSCAPE                            , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_NIGHT                                , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT                       , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_THEATRE                              , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_BEACH                                , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_SNOW                                 , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_SUNSET                               , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO                          , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_FIREWORKS                            , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_SPORTS                               , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_PARTY                                , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT                          , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_BARCODE                              , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO                     , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_HDR                                  , // HIDL v3.2\n+    
ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT              , // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START                   = 100, // HIDL v3.2\n+    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END                     = 127, // HIDL v3.2\n+} camera_metadata_enum_android_control_scene_mode_t;\n+\n+// ANDROID_CONTROL_VIDEO_STABILIZATION_MODE\n+typedef enum camera_metadata_enum_android_control_video_stabilization_mode {\n+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF                    , // HIDL v3.2\n+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON                     , // HIDL v3.2\n+} camera_metadata_enum_android_control_video_stabilization_mode_t;\n+\n+// ANDROID_CONTROL_AE_STATE\n+typedef enum camera_metadata_enum_android_control_ae_state {\n+    ANDROID_CONTROL_AE_STATE_INACTIVE                               , // HIDL v3.2\n+    ANDROID_CONTROL_AE_STATE_SEARCHING                              , // HIDL v3.2\n+    ANDROID_CONTROL_AE_STATE_CONVERGED                              , // HIDL v3.2\n+    ANDROID_CONTROL_AE_STATE_LOCKED                                 , // HIDL v3.2\n+    ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED                         , // HIDL v3.2\n+    ANDROID_CONTROL_AE_STATE_PRECAPTURE                             , // HIDL v3.2\n+} camera_metadata_enum_android_control_ae_state_t;\n+\n+// ANDROID_CONTROL_AF_STATE\n+typedef enum camera_metadata_enum_android_control_af_state {\n+    ANDROID_CONTROL_AF_STATE_INACTIVE                               , // HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN                           , // HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED                        , // HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN                            , // HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED                         , // HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED                     , // HIDL v3.2\n+    ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED    
                  , // HIDL v3.2\n+} camera_metadata_enum_android_control_af_state_t;\n+\n+// ANDROID_CONTROL_AWB_STATE\n+typedef enum camera_metadata_enum_android_control_awb_state {\n+    ANDROID_CONTROL_AWB_STATE_INACTIVE                              , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_STATE_SEARCHING                             , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_STATE_CONVERGED                             , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_STATE_LOCKED                                , // HIDL v3.2\n+} camera_metadata_enum_android_control_awb_state_t;\n+\n+// ANDROID_CONTROL_AE_LOCK_AVAILABLE\n+typedef enum camera_metadata_enum_android_control_ae_lock_available {\n+    ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE                         , // HIDL v3.2\n+    ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE                          , // HIDL v3.2\n+} camera_metadata_enum_android_control_ae_lock_available_t;\n+\n+// ANDROID_CONTROL_AWB_LOCK_AVAILABLE\n+typedef enum camera_metadata_enum_android_control_awb_lock_available {\n+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE                        , // HIDL v3.2\n+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE                         , // HIDL v3.2\n+} camera_metadata_enum_android_control_awb_lock_available_t;\n+\n+// ANDROID_CONTROL_ENABLE_ZSL\n+typedef enum camera_metadata_enum_android_control_enable_zsl {\n+    ANDROID_CONTROL_ENABLE_ZSL_FALSE                                , // HIDL v3.2\n+    ANDROID_CONTROL_ENABLE_ZSL_TRUE                                 , // HIDL v3.2\n+} camera_metadata_enum_android_control_enable_zsl_t;\n+\n+// ANDROID_CONTROL_AF_SCENE_CHANGE\n+typedef enum camera_metadata_enum_android_control_af_scene_change {\n+    ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED                    , // HIDL v3.3\n+    ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED                        , // HIDL v3.3\n+} camera_metadata_enum_android_control_af_scene_change_t;\n+\n+\n+// ANDROID_DEMOSAIC_MODE\n+typedef enum 
camera_metadata_enum_android_demosaic_mode {\n+    ANDROID_DEMOSAIC_MODE_FAST                                      , // HIDL v3.2\n+    ANDROID_DEMOSAIC_MODE_HIGH_QUALITY                              , // HIDL v3.2\n+} camera_metadata_enum_android_demosaic_mode_t;\n+\n+\n+// ANDROID_EDGE_MODE\n+typedef enum camera_metadata_enum_android_edge_mode {\n+    ANDROID_EDGE_MODE_OFF                                           , // HIDL v3.2\n+    ANDROID_EDGE_MODE_FAST                                          , // HIDL v3.2\n+    ANDROID_EDGE_MODE_HIGH_QUALITY                                  , // HIDL v3.2\n+    ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG                              , // HIDL v3.2\n+} camera_metadata_enum_android_edge_mode_t;\n+\n+\n+// ANDROID_FLASH_MODE\n+typedef enum camera_metadata_enum_android_flash_mode {\n+    ANDROID_FLASH_MODE_OFF                                          , // HIDL v3.2\n+    ANDROID_FLASH_MODE_SINGLE                                       , // HIDL v3.2\n+    ANDROID_FLASH_MODE_TORCH                                        , // HIDL v3.2\n+} camera_metadata_enum_android_flash_mode_t;\n+\n+// ANDROID_FLASH_STATE\n+typedef enum camera_metadata_enum_android_flash_state {\n+    ANDROID_FLASH_STATE_UNAVAILABLE                                 , // HIDL v3.2\n+    ANDROID_FLASH_STATE_CHARGING                                    , // HIDL v3.2\n+    ANDROID_FLASH_STATE_READY                                       , // HIDL v3.2\n+    ANDROID_FLASH_STATE_FIRED                                       , // HIDL v3.2\n+    ANDROID_FLASH_STATE_PARTIAL                                     , // HIDL v3.2\n+} camera_metadata_enum_android_flash_state_t;\n+\n+\n+// ANDROID_FLASH_INFO_AVAILABLE\n+typedef enum camera_metadata_enum_android_flash_info_available {\n+    ANDROID_FLASH_INFO_AVAILABLE_FALSE                              , // HIDL v3.2\n+    ANDROID_FLASH_INFO_AVAILABLE_TRUE                               , // HIDL v3.2\n+} 
camera_metadata_enum_android_flash_info_available_t;\n+\n+\n+// ANDROID_HOT_PIXEL_MODE\n+typedef enum camera_metadata_enum_android_hot_pixel_mode {\n+    ANDROID_HOT_PIXEL_MODE_OFF                                      , // HIDL v3.2\n+    ANDROID_HOT_PIXEL_MODE_FAST                                     , // HIDL v3.2\n+    ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY                             , // HIDL v3.2\n+} camera_metadata_enum_android_hot_pixel_mode_t;\n+\n+\n+\n+// ANDROID_LENS_OPTICAL_STABILIZATION_MODE\n+typedef enum camera_metadata_enum_android_lens_optical_stabilization_mode {\n+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF                     , // HIDL v3.2\n+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON                      , // HIDL v3.2\n+} camera_metadata_enum_android_lens_optical_stabilization_mode_t;\n+\n+// ANDROID_LENS_FACING\n+typedef enum camera_metadata_enum_android_lens_facing {\n+    ANDROID_LENS_FACING_FRONT                                       , // HIDL v3.2\n+    ANDROID_LENS_FACING_BACK                                        , // HIDL v3.2\n+    ANDROID_LENS_FACING_EXTERNAL                                    , // HIDL v3.2\n+} camera_metadata_enum_android_lens_facing_t;\n+\n+// ANDROID_LENS_STATE\n+typedef enum camera_metadata_enum_android_lens_state {\n+    ANDROID_LENS_STATE_STATIONARY                                   , // HIDL v3.2\n+    ANDROID_LENS_STATE_MOVING                                       , // HIDL v3.2\n+} camera_metadata_enum_android_lens_state_t;\n+\n+// ANDROID_LENS_POSE_REFERENCE\n+typedef enum camera_metadata_enum_android_lens_pose_reference {\n+    ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA                      , // HIDL v3.3\n+    ANDROID_LENS_POSE_REFERENCE_GYROSCOPE                           , // HIDL v3.3\n+} camera_metadata_enum_android_lens_pose_reference_t;\n+\n+\n+// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION\n+typedef enum camera_metadata_enum_android_lens_info_focus_distance_calibration {\n+    
ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED       , // HIDL v3.2\n+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE        , // HIDL v3.2\n+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED         , // HIDL v3.2\n+} camera_metadata_enum_android_lens_info_focus_distance_calibration_t;\n+\n+\n+// ANDROID_NOISE_REDUCTION_MODE\n+typedef enum camera_metadata_enum_android_noise_reduction_mode {\n+    ANDROID_NOISE_REDUCTION_MODE_OFF                                , // HIDL v3.2\n+    ANDROID_NOISE_REDUCTION_MODE_FAST                               , // HIDL v3.2\n+    ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY                       , // HIDL v3.2\n+    ANDROID_NOISE_REDUCTION_MODE_MINIMAL                            , // HIDL v3.2\n+    ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG                   , // HIDL v3.2\n+} camera_metadata_enum_android_noise_reduction_mode_t;\n+\n+\n+// ANDROID_QUIRKS_PARTIAL_RESULT\n+typedef enum camera_metadata_enum_android_quirks_partial_result {\n+    ANDROID_QUIRKS_PARTIAL_RESULT_FINAL                             , // HIDL v3.2\n+    ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL                           , // HIDL v3.2\n+} camera_metadata_enum_android_quirks_partial_result_t;\n+\n+\n+// ANDROID_REQUEST_METADATA_MODE\n+typedef enum camera_metadata_enum_android_request_metadata_mode {\n+    ANDROID_REQUEST_METADATA_MODE_NONE                              , // HIDL v3.2\n+    ANDROID_REQUEST_METADATA_MODE_FULL                              , // HIDL v3.2\n+} camera_metadata_enum_android_request_metadata_mode_t;\n+\n+// ANDROID_REQUEST_TYPE\n+typedef enum camera_metadata_enum_android_request_type {\n+    ANDROID_REQUEST_TYPE_CAPTURE                                    , // HIDL v3.2\n+    ANDROID_REQUEST_TYPE_REPROCESS                                  , // HIDL v3.2\n+} camera_metadata_enum_android_request_type_t;\n+\n+// ANDROID_REQUEST_AVAILABLE_CAPABILITIES\n+typedef enum 
camera_metadata_enum_android_request_available_capabilities {\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE      , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR            , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING   , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW                      , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING     , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS     , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE            , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING         , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT             , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO\n+                                                                     , // HIDL v3.2\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING          , // HIDL v3.3\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA     , // HIDL v3.3\n+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME               , // HIDL v3.3\n+} camera_metadata_enum_android_request_available_capabilities_t;\n+\n+\n+// ANDROID_SCALER_AVAILABLE_FORMATS\n+typedef enum camera_metadata_enum_android_scaler_available_formats {\n+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW16                           = 0x20, // HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE                      = 0x24, // HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_FORMATS_YV12                            = 0x32315659, // HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP                    = 0x11, // HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED          = 0x22, // HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888                   = 0x23, // HIDL v3.2\n+    
ANDROID_SCALER_AVAILABLE_FORMATS_BLOB                            = 0x21, // HIDL v3.2\n+} camera_metadata_enum_android_scaler_available_formats_t;\n+\n+// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS\n+typedef enum camera_metadata_enum_android_scaler_available_stream_configurations {\n+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT           , // HIDL v3.2\n+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT            , // HIDL v3.2\n+} camera_metadata_enum_android_scaler_available_stream_configurations_t;\n+\n+// ANDROID_SCALER_CROPPING_TYPE\n+typedef enum camera_metadata_enum_android_scaler_cropping_type {\n+    ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY                        , // HIDL v3.2\n+    ANDROID_SCALER_CROPPING_TYPE_FREEFORM                           , // HIDL v3.2\n+} camera_metadata_enum_android_scaler_cropping_type_t;\n+\n+\n+// ANDROID_SENSOR_REFERENCE_ILLUMINANT1\n+typedef enum camera_metadata_enum_android_sensor_reference_illuminant1 {\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT                    = 1, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT                 = 2, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN                    = 3, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH                       = 4, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER                = 9, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER              = 10, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE                       = 11, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT        = 12, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT       = 13, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT      = 14, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT           = 15, // HIDL v3.2\n+    
ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A                  = 17, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B                  = 18, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C                  = 19, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55                         = 20, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65                         = 21, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75                         = 22, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50                         = 23, // HIDL v3.2\n+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN         = 24, // HIDL v3.2\n+} camera_metadata_enum_android_sensor_reference_illuminant1_t;\n+\n+// ANDROID_SENSOR_TEST_PATTERN_MODE\n+typedef enum camera_metadata_enum_android_sensor_test_pattern_mode {\n+    ANDROID_SENSOR_TEST_PATTERN_MODE_OFF                            , // HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR                    , // HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS                     , // HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY        , // HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_MODE_PN9                            , // HIDL v3.2\n+    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1                         = 256, // HIDL v3.2\n+} camera_metadata_enum_android_sensor_test_pattern_mode_t;\n+\n+\n+// ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT\n+typedef enum camera_metadata_enum_android_sensor_info_color_filter_arrangement {\n+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB               , // HIDL v3.2\n+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG               , // HIDL v3.2\n+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG               , // HIDL v3.2\n+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR               , // HIDL v3.2\n+    
ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB                , // HIDL v3.2\n+} camera_metadata_enum_android_sensor_info_color_filter_arrangement_t;\n+\n+// ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE\n+typedef enum camera_metadata_enum_android_sensor_info_timestamp_source {\n+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN                    , // HIDL v3.2\n+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME                   , // HIDL v3.2\n+} camera_metadata_enum_android_sensor_info_timestamp_source_t;\n+\n+// ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED\n+typedef enum camera_metadata_enum_android_sensor_info_lens_shading_applied {\n+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE                  , // HIDL v3.2\n+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE                   , // HIDL v3.2\n+} camera_metadata_enum_android_sensor_info_lens_shading_applied_t;\n+\n+\n+// ANDROID_SHADING_MODE\n+typedef enum camera_metadata_enum_android_shading_mode {\n+    ANDROID_SHADING_MODE_OFF                                        , // HIDL v3.2\n+    ANDROID_SHADING_MODE_FAST                                       , // HIDL v3.2\n+    ANDROID_SHADING_MODE_HIGH_QUALITY                               , // HIDL v3.2\n+} camera_metadata_enum_android_shading_mode_t;\n+\n+\n+// ANDROID_STATISTICS_FACE_DETECT_MODE\n+typedef enum camera_metadata_enum_android_statistics_face_detect_mode {\n+    ANDROID_STATISTICS_FACE_DETECT_MODE_OFF                         , // HIDL v3.2\n+    ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE                      , // HIDL v3.2\n+    ANDROID_STATISTICS_FACE_DETECT_MODE_FULL                        , // HIDL v3.2\n+} camera_metadata_enum_android_statistics_face_detect_mode_t;\n+\n+// ANDROID_STATISTICS_HISTOGRAM_MODE\n+typedef enum camera_metadata_enum_android_statistics_histogram_mode {\n+    ANDROID_STATISTICS_HISTOGRAM_MODE_OFF                           , // HIDL v3.2\n+    ANDROID_STATISTICS_HISTOGRAM_MODE_ON                            , // HIDL v3.2\n+} 
camera_metadata_enum_android_statistics_histogram_mode_t;\n+\n+// ANDROID_STATISTICS_SHARPNESS_MAP_MODE\n+typedef enum camera_metadata_enum_android_statistics_sharpness_map_mode {\n+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF                       , // HIDL v3.2\n+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON                        , // HIDL v3.2\n+} camera_metadata_enum_android_statistics_sharpness_map_mode_t;\n+\n+// ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE\n+typedef enum camera_metadata_enum_android_statistics_hot_pixel_map_mode {\n+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF                       , // HIDL v3.2\n+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON                        , // HIDL v3.2\n+} camera_metadata_enum_android_statistics_hot_pixel_map_mode_t;\n+\n+// ANDROID_STATISTICS_SCENE_FLICKER\n+typedef enum camera_metadata_enum_android_statistics_scene_flicker {\n+    ANDROID_STATISTICS_SCENE_FLICKER_NONE                           , // HIDL v3.2\n+    ANDROID_STATISTICS_SCENE_FLICKER_50HZ                           , // HIDL v3.2\n+    ANDROID_STATISTICS_SCENE_FLICKER_60HZ                           , // HIDL v3.2\n+} camera_metadata_enum_android_statistics_scene_flicker_t;\n+\n+// ANDROID_STATISTICS_LENS_SHADING_MAP_MODE\n+typedef enum camera_metadata_enum_android_statistics_lens_shading_map_mode {\n+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF                    , // HIDL v3.2\n+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON                     , // HIDL v3.2\n+} camera_metadata_enum_android_statistics_lens_shading_map_mode_t;\n+\n+// ANDROID_STATISTICS_OIS_DATA_MODE\n+typedef enum camera_metadata_enum_android_statistics_ois_data_mode {\n+    ANDROID_STATISTICS_OIS_DATA_MODE_OFF                            , // HIDL v3.3\n+    ANDROID_STATISTICS_OIS_DATA_MODE_ON                             , // HIDL v3.3\n+} camera_metadata_enum_android_statistics_ois_data_mode_t;\n+\n+\n+\n+// ANDROID_TONEMAP_MODE\n+typedef enum 
camera_metadata_enum_android_tonemap_mode {\n+    ANDROID_TONEMAP_MODE_CONTRAST_CURVE                             , // HIDL v3.2\n+    ANDROID_TONEMAP_MODE_FAST                                       , // HIDL v3.2\n+    ANDROID_TONEMAP_MODE_HIGH_QUALITY                               , // HIDL v3.2\n+    ANDROID_TONEMAP_MODE_GAMMA_VALUE                                , // HIDL v3.2\n+    ANDROID_TONEMAP_MODE_PRESET_CURVE                               , // HIDL v3.2\n+} camera_metadata_enum_android_tonemap_mode_t;\n+\n+// ANDROID_TONEMAP_PRESET_CURVE\n+typedef enum camera_metadata_enum_android_tonemap_preset_curve {\n+    ANDROID_TONEMAP_PRESET_CURVE_SRGB                               , // HIDL v3.2\n+    ANDROID_TONEMAP_PRESET_CURVE_REC709                             , // HIDL v3.2\n+} camera_metadata_enum_android_tonemap_preset_curve_t;\n+\n+\n+// ANDROID_LED_TRANSMIT\n+typedef enum camera_metadata_enum_android_led_transmit {\n+    ANDROID_LED_TRANSMIT_OFF                                        , // HIDL v3.2\n+    ANDROID_LED_TRANSMIT_ON                                         , // HIDL v3.2\n+} camera_metadata_enum_android_led_transmit_t;\n+\n+// ANDROID_LED_AVAILABLE_LEDS\n+typedef enum camera_metadata_enum_android_led_available_leds {\n+    ANDROID_LED_AVAILABLE_LEDS_TRANSMIT                             , // HIDL v3.2\n+} camera_metadata_enum_android_led_available_leds_t;\n+\n+\n+// ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL\n+typedef enum camera_metadata_enum_android_info_supported_hardware_level {\n+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED                   , // HIDL v3.2\n+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL                      , // HIDL v3.2\n+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY                    , // HIDL v3.2\n+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3                         , // HIDL v3.2\n+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL                  , // HIDL v3.3\n+} 
camera_metadata_enum_android_info_supported_hardware_level_t;\n+\n+\n+// ANDROID_BLACK_LEVEL_LOCK\n+typedef enum camera_metadata_enum_android_black_level_lock {\n+    ANDROID_BLACK_LEVEL_LOCK_OFF                                    , // HIDL v3.2\n+    ANDROID_BLACK_LEVEL_LOCK_ON                                     , // HIDL v3.2\n+} camera_metadata_enum_android_black_level_lock_t;\n+\n+\n+// ANDROID_SYNC_FRAME_NUMBER\n+typedef enum camera_metadata_enum_android_sync_frame_number {\n+    ANDROID_SYNC_FRAME_NUMBER_CONVERGING                             = -1, // HIDL v3.2\n+    ANDROID_SYNC_FRAME_NUMBER_UNKNOWN                                = -2, // HIDL v3.2\n+} camera_metadata_enum_android_sync_frame_number_t;\n+\n+// ANDROID_SYNC_MAX_LATENCY\n+typedef enum camera_metadata_enum_android_sync_max_latency {\n+    ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL                       = 0, // HIDL v3.2\n+    ANDROID_SYNC_MAX_LATENCY_UNKNOWN                                 = -1, // HIDL v3.2\n+} camera_metadata_enum_android_sync_max_latency_t;\n+\n+\n+\n+// ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS\n+typedef enum camera_metadata_enum_android_depth_available_depth_stream_configurations {\n+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT      , // HIDL v3.2\n+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT       , // HIDL v3.2\n+} camera_metadata_enum_android_depth_available_depth_stream_configurations_t;\n+\n+// ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE\n+typedef enum camera_metadata_enum_android_depth_depth_is_exclusive {\n+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE                          , // HIDL v3.2\n+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE                           , // HIDL v3.2\n+} camera_metadata_enum_android_depth_depth_is_exclusive_t;\n+\n+\n+// ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE\n+typedef enum camera_metadata_enum_android_logical_multi_camera_sensor_sync_type {\n+    
ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE       , // HIDL v3.3\n+    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED        , // HIDL v3.3\n+} camera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t;\n+\n+\n+// ANDROID_DISTORTION_CORRECTION_MODE\n+typedef enum camera_metadata_enum_android_distortion_correction_mode {\n+    ANDROID_DISTORTION_CORRECTION_MODE_OFF                          , // HIDL v3.3\n+    ANDROID_DISTORTION_CORRECTION_MODE_FAST                         , // HIDL v3.3\n+    ANDROID_DISTORTION_CORRECTION_MODE_HIGH_QUALITY                 , // HIDL v3.3\n+} camera_metadata_enum_android_distortion_correction_mode_t;\n+\n+\ndiff --git a/include/android/metadata/system/camera_vendor_tags.h b/include/android/metadata/system/camera_vendor_tags.h\nnew file mode 100644\nindex 000000000000..0bb542608c91\n--- /dev/null\n+++ b/include/android/metadata/system/camera_vendor_tags.h\n@@ -0,0 +1,158 @@\n+/*\n+ * Copyright 2014 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H\n+#define SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+#define CAMERA_METADATA_VENDOR_TAG_BOUNDARY 0x80000000u\n+#define CAMERA_METADATA_INVALID_VENDOR_ID UINT64_MAX\n+\n+typedef uint64_t metadata_vendor_id_t;\n+\n+/**\n+ * Vendor tags:\n+ *\n+ * This structure contains 
basic functions for enumerating an immutable set of\n+ * vendor-defined camera metadata tags, and querying static information about\n+ * their structure/type.  The intended use of this information is to validate\n+ * the structure of metadata returned by the camera HAL, and to allow vendor-\n+ * defined metadata tags to be visible in application facing camera API.\n+ */\n+typedef struct vendor_tag_ops vendor_tag_ops_t;\n+struct vendor_tag_ops {\n+    /**\n+     * Get the number of vendor tags supported on this platform. Used to\n+     * calculate the size of buffer needed for holding the array of all tags\n+     * returned by get_all_tags().  This must return -1 on error.\n+     */\n+    int (*get_tag_count)(const vendor_tag_ops_t *v);\n+\n+    /**\n+     * Fill an array with all of the supported vendor tags on this platform.\n+     * get_tag_count() must return the number of tags supported, and\n+     * tag_array will be allocated with enough space to hold the number of tags\n+     * returned by get_tag_count().\n+     */\n+    void (*get_all_tags)(const vendor_tag_ops_t *v, uint32_t *tag_array);\n+\n+    /**\n+     * Get the vendor section name for a vendor-specified entry tag. This will\n+     * only be called for vendor-defined tags.\n+     *\n+     * The naming convention for the vendor-specific section names should\n+     * follow a style similar to the Java package style.  For example,\n+     * CameraZoom Inc. 
must prefix their sections with \"com.camerazoom.\"\n+     * This must return NULL if the tag is outside the bounds of\n+     * vendor-defined sections.\n+     *\n+     * There may be different vendor-defined tag sections, for example the\n+     * phone maker, the chipset maker, and the camera module maker may each\n+     * have their own \"com.vendor.\"-prefixed section.\n+     *\n+     * The memory pointed to by the return value must remain valid for the\n+     * lifetime of the module, and is owned by the module.\n+     */\n+    const char *(*get_section_name)(const vendor_tag_ops_t *v, uint32_t tag);\n+\n+    /**\n+     * Get the tag name for a vendor-specified entry tag. This is only called\n+     * for vendor-defined tags, and must return NULL if it is not a\n+     * vendor-defined tag.\n+     *\n+     * The memory pointed to by the return value must remain valid for the\n+     * lifetime of the module, and is owned by the module.\n+     */\n+    const char *(*get_tag_name)(const vendor_tag_ops_t *v, uint32_t tag);\n+\n+    /**\n+     * Get tag type for a vendor-specified entry tag. The type returned must be\n+     * a valid type defined in camera_metadata.h.  This method is only called\n+     * for tags >= CAMERA_METADATA_VENDOR_TAG_BOUNDARY, and must return\n+     * -1 if the tag is outside the bounds of the vendor-defined sections.\n+     */\n+    int (*get_tag_type)(const vendor_tag_ops_t *v, uint32_t tag);\n+\n+    /* Reserved for future use.  These must be initialized to NULL. */\n+    void* reserved[8];\n+};\n+\n+struct vendor_tag_cache_ops {\n+    /**\n+     * Get the number of vendor tags supported on this platform. Used to\n+     * calculate the size of buffer needed for holding the array of all tags\n+     * returned by get_all_tags().  
This must return -1 on error.\n+     */\n+    int (*get_tag_count)(metadata_vendor_id_t id);\n+\n+    /**\n+     * Fill an array with all of the supported vendor tags on this platform.\n+     * get_tag_count() must return the number of tags supported, and\n+     * tag_array will be allocated with enough space to hold the number of tags\n+     * returned by get_tag_count().\n+     */\n+    void (*get_all_tags)(uint32_t *tag_array, metadata_vendor_id_t id);\n+\n+    /**\n+     * Get the vendor section name for a vendor-specified entry tag. This will\n+     * only be called for vendor-defined tags.\n+     *\n+     * The naming convention for the vendor-specific section names should\n+     * follow a style similar to the Java package style.  For example,\n+     * CameraZoom Inc. must prefix their sections with \"com.camerazoom.\"\n+     * This must return NULL if the tag is outside the bounds of\n+     * vendor-defined sections.\n+     *\n+     * There may be different vendor-defined tag sections, for example the\n+     * phone maker, the chipset maker, and the camera module maker may each\n+     * have their own \"com.vendor.\"-prefixed section.\n+     *\n+     * The memory pointed to by the return value must remain valid for the\n+     * lifetime of the module, and is owned by the module.\n+     */\n+    const char *(*get_section_name)(uint32_t tag, metadata_vendor_id_t id);\n+\n+    /**\n+     * Get the tag name for a vendor-specified entry tag. This is only called\n+     * for vendor-defined tags, and must return NULL if it is not a\n+     * vendor-defined tag.\n+     *\n+     * The memory pointed to by the return value must remain valid for the\n+     * lifetime of the module, and is owned by the module.\n+     */\n+    const char *(*get_tag_name)(uint32_t tag, metadata_vendor_id_t id);\n+\n+    /**\n+     * Get tag type for a vendor-specified entry tag. The type returned must be\n+     * a valid type defined in camera_metadata.h.  
This method is only called\n+     * for tags >= CAMERA_METADATA_VENDOR_TAG_BOUNDARY, and must return\n+     * -1 if the tag is outside the bounds of the vendor-defined sections.\n+     */\n+    int (*get_tag_type)(uint32_t tag, metadata_vendor_id_t id);\n+\n+    /* Reserved for future use.  These must be initialized to NULL. */\n+    void* reserved[8];\n+};\n+\n+#ifdef __cplusplus\n+} /* extern \"C\" */\n+#endif\n+\n+#endif /* SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H */\n+\ndiff --git a/include/android/system/core/include/android/log.h b/include/android/system/core/include/android/log.h\nnew file mode 100644\nindex 000000000000..1c171b7bf2b5\n--- /dev/null\n+++ b/include/android/system/core/include/android/log.h\n@@ -0,0 +1,144 @@\n+/*\n+ * Copyright (C) 2009 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef _ANDROID_LOG_H\n+#define _ANDROID_LOG_H\n+\n+/******************************************************************\n+ *\n+ * IMPORTANT NOTICE:\n+ *\n+ *   This file is part of Android's set of stable system headers\n+ *   exposed by the Android NDK (Native Development Kit) since\n+ *   platform release 1.5\n+ *\n+ *   Third-party source AND binary code relies on the definitions\n+ *   here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES.\n+ *\n+ *   - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES)\n+ *   - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS\n+ *   - DO NOT 
CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY\n+ *   - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES\n+ */\n+\n+/*\n+ * Support routines to send messages to the Android in-kernel log buffer,\n+ * which can later be accessed through the 'logcat' utility.\n+ *\n+ * Each log message must have\n+ *   - a priority\n+ *   - a log tag\n+ *   - some text\n+ *\n+ * The tag normally corresponds to the component that emits the log message,\n+ * and should be reasonably small.\n+ *\n+ * Log message text may be truncated to less than an implementation-specific\n+ * limit (e.g. 1023 characters max).\n+ *\n+ * Note that a newline character (\"\\n\") will be appended automatically to your\n+ * log message, if not already there. It is not possible to send several messages\n+ * and have them appear on a single line in logcat.\n+ *\n+ * PLEASE USE LOGS WITH MODERATION:\n+ *\n+ *  - Sending log messages eats CPU and slow down your application and the\n+ *    system.\n+ *\n+ *  - The circular log buffer is pretty small (<64KB), sending many messages\n+ *    might push off other important log messages from the rest of the system.\n+ *\n+ *  - In release builds, only send log messages to account for exceptional\n+ *    conditions.\n+ *\n+ * NOTE: These functions MUST be implemented by /system/lib/liblog.so\n+ */\n+\n+#include <stdarg.h>\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+/*\n+ * Android log priority values, in ascending priority order.\n+ */\n+typedef enum android_LogPriority {\n+    ANDROID_LOG_UNKNOWN = 0,\n+    ANDROID_LOG_DEFAULT,    /* only for SetMinPriority() */\n+    ANDROID_LOG_VERBOSE,\n+    ANDROID_LOG_DEBUG,\n+    ANDROID_LOG_INFO,\n+    ANDROID_LOG_WARN,\n+    ANDROID_LOG_ERROR,\n+    ANDROID_LOG_FATAL,\n+    ANDROID_LOG_SILENT,     /* only for SetMinPriority(); must be last */\n+} android_LogPriority;\n+\n+/*\n+ * Send a simple string to the log.\n+ */\n+int __android_log_write(int prio, const char *tag, const char *text);\n+\n+/*\n+ * Send a formatted 
string to the log, used like printf(fmt,...)\n+ */\n+int __android_log_print(int prio, const char *tag,  const char *fmt, ...)\n+#if defined(__GNUC__)\n+#ifdef __USE_MINGW_ANSI_STDIO\n+#if __USE_MINGW_ANSI_STDIO\n+    __attribute__ ((format(gnu_printf, 3, 4)))\n+#else\n+    __attribute__ ((format(printf, 3, 4)))\n+#endif\n+#else\n+    __attribute__ ((format(printf, 3, 4)))\n+#endif\n+#endif\n+    ;\n+\n+/*\n+ * A variant of __android_log_print() that takes a va_list to list\n+ * additional parameters.\n+ */\n+int __android_log_vprint(int prio, const char *tag,\n+                         const char *fmt, va_list ap);\n+\n+/*\n+ * Log an assertion failure and abort the process to have a chance\n+ * to inspect it if a debugger is attached. This uses the FATAL priority.\n+ */\n+void __android_log_assert(const char *cond, const char *tag,\n+                          const char *fmt, ...)\n+#if defined(__GNUC__)\n+    __attribute__ ((noreturn))\n+#ifdef __USE_MINGW_ANSI_STDIO\n+#if __USE_MINGW_ANSI_STDIO\n+    __attribute__ ((format(gnu_printf, 3, 4)))\n+#else\n+    __attribute__ ((format(printf, 3, 4)))\n+#endif\n+#else\n+    __attribute__ ((format(printf, 3, 4)))\n+#endif\n+#endif\n+    ;\n+\n+#ifdef __cplusplus\n+}\n+#endif\n+\n+#endif /* _ANDROID_LOG_H */\ndiff --git a/include/android/system/core/include/cutils/compiler.h b/include/android/system/core/include/cutils/compiler.h\nnew file mode 100644\nindex 000000000000..70f884a1e701\n--- /dev/null\n+++ b/include/android/system/core/include/cutils/compiler.h\n@@ -0,0 +1,44 @@\n+/*\n+ * Copyright (C) 2009 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS 
IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef ANDROID_CUTILS_COMPILER_H\n+#define ANDROID_CUTILS_COMPILER_H\n+\n+/*\n+ * helps the compiler's optimizer predicting branches\n+ */\n+\n+#ifdef __cplusplus\n+#   define CC_LIKELY( exp )    (__builtin_expect( !!(exp), true ))\n+#   define CC_UNLIKELY( exp )  (__builtin_expect( !!(exp), false ))\n+#else\n+#   define CC_LIKELY( exp )    (__builtin_expect( !!(exp), 1 ))\n+#   define CC_UNLIKELY( exp )  (__builtin_expect( !!(exp), 0 ))\n+#endif\n+\n+/**\n+ * exports marked symbols\n+ *\n+ * if used on a C++ class declaration, this macro must be inserted\n+ * after the \"class\" keyword. For instance:\n+ *\n+ * template <typename TYPE>\n+ * class ANDROID_API Singleton { }\n+ */\n+\n+#define ANDROID_API __attribute__((visibility(\"default\")))\n+\n+#endif // ANDROID_CUTILS_COMPILER_H\ndiff --git a/include/android/system/core/include/cutils/native_handle.h b/include/android/system/core/include/cutils/native_handle.h\nnew file mode 100644\nindex 000000000000..268c5d3f51b7\n--- /dev/null\n+++ b/include/android/system/core/include/cutils/native_handle.h\n@@ -0,0 +1,69 @@\n+/*\n+ * Copyright (C) 2009 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef NATIVE_HANDLE_H_\n+#define 
NATIVE_HANDLE_H_\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+typedef struct native_handle\n+{\n+    int version;        /* sizeof(native_handle_t) */\n+    int numFds;         /* number of file-descriptors at &data[0] */\n+    int numInts;        /* number of ints at &data[numFds] */\n+    int data[0];        /* numFds + numInts ints */\n+} native_handle_t;\n+\n+/*\n+ * native_handle_close\n+ * \n+ * closes the file descriptors contained in this native_handle_t\n+ * \n+ * return 0 on success, or a negative error code on failure\n+ * \n+ */\n+int native_handle_close(const native_handle_t* h);\n+\n+\n+/*\n+ * native_handle_create\n+ * \n+ * creates a native_handle_t and initializes it. must be destroyed with\n+ * native_handle_delete().\n+ * \n+ */\n+native_handle_t* native_handle_create(int numFds, int numInts);\n+\n+/*\n+ * native_handle_delete\n+ * \n+ * frees a native_handle_t allocated with native_handle_create().\n+ * This ONLY frees the memory allocated for the native_handle_t, but doesn't\n+ * close the file descriptors; which can be achieved with native_handle_close().\n+ * \n+ * return 0 on success, or a negative error code on failure\n+ * \n+ */\n+int native_handle_delete(native_handle_t* h);\n+\n+\n+#ifdef __cplusplus\n+}\n+#endif\n+\n+#endif /* NATIVE_HANDLE_H_ */\ndiff --git a/include/android/system/core/include/system/camera.h b/include/android/system/core/include/system/camera.h\nnew file mode 100644\nindex 000000000000..5d0873ac42a6\n--- /dev/null\n+++ b/include/android/system/core/include/system/camera.h\n@@ -0,0 +1,298 @@\n+/*\n+ * Copyright (C) 2011 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License 
is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef SYSTEM_CORE_INCLUDE_ANDROID_CAMERA_H\n+#define SYSTEM_CORE_INCLUDE_ANDROID_CAMERA_H\n+\n+#include <stdint.h>\n+#include <sys/cdefs.h>\n+#include <sys/types.h>\n+#include <cutils/native_handle.h>\n+#include <hardware/hardware.h>\n+#include <hardware/gralloc.h>\n+\n+__BEGIN_DECLS\n+\n+/**\n+ * A set of bit masks for specifying how the received preview frames are\n+ * handled before the previewCallback() call.\n+ *\n+ * The least significant 3 bits of an \"int\" value are used for this purpose:\n+ *\n+ * ..... 0 0 0\n+ *       ^ ^ ^\n+ *       | | |---------> determine whether the callback is enabled or not\n+ *       | |-----------> determine whether the callback is one-shot or not\n+ *       |-------------> determine whether the frame is copied out or not\n+ *\n+ * WARNING: When a frame is sent directly without copying, it is the frame\n+ * receiver's responsiblity to make sure that the frame data won't get\n+ * corrupted by subsequent preview frames filled by the camera. This flag is\n+ * recommended only when copying out data brings significant performance price\n+ * and the handling/processing of the received frame data is always faster than\n+ * the preview frame rate so that data corruption won't occur.\n+ *\n+ * For instance,\n+ * 1. 0x00 disables the callback. In this case, copy out and one shot bits\n+ *    are ignored.\n+ * 2. 0x01 enables a callback without copying out the received frames. A\n+ *    typical use case is the Camcorder application to avoid making costly\n+ *    frame copies.\n+ * 3. 0x05 is enabling a callback with frame copied out repeatedly. A typical\n+ *    use case is the Camera application.\n+ * 4. 0x07 is enabling a callback with frame copied out only once. 
A typical\n+ *    use case is the Barcode scanner application.\n+ */\n+\n+enum {\n+    CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK = 0x01,\n+    CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK = 0x02,\n+    CAMERA_FRAME_CALLBACK_FLAG_COPY_OUT_MASK = 0x04,\n+    /** Typical use cases */\n+    CAMERA_FRAME_CALLBACK_FLAG_NOOP = 0x00,\n+    CAMERA_FRAME_CALLBACK_FLAG_CAMCORDER = 0x01,\n+    CAMERA_FRAME_CALLBACK_FLAG_CAMERA = 0x05,\n+    CAMERA_FRAME_CALLBACK_FLAG_BARCODE_SCANNER = 0x07\n+};\n+\n+/** msgType in notifyCallback and dataCallback functions */\n+enum {\n+    CAMERA_MSG_ERROR = 0x0001,            // notifyCallback\n+    CAMERA_MSG_SHUTTER = 0x0002,          // notifyCallback\n+    CAMERA_MSG_FOCUS = 0x0004,            // notifyCallback\n+    CAMERA_MSG_ZOOM = 0x0008,             // notifyCallback\n+    CAMERA_MSG_PREVIEW_FRAME = 0x0010,    // dataCallback\n+    CAMERA_MSG_VIDEO_FRAME = 0x0020,      // data_timestamp_callback\n+    CAMERA_MSG_POSTVIEW_FRAME = 0x0040,   // dataCallback\n+    CAMERA_MSG_RAW_IMAGE = 0x0080,        // dataCallback\n+    CAMERA_MSG_COMPRESSED_IMAGE = 0x0100, // dataCallback\n+    CAMERA_MSG_RAW_IMAGE_NOTIFY = 0x0200, // dataCallback\n+    // Preview frame metadata. This can be combined with\n+    // CAMERA_MSG_PREVIEW_FRAME in dataCallback. For example, the apps can\n+    // request FRAME and METADATA. Or the apps can request only FRAME or only\n+    // METADATA.\n+    CAMERA_MSG_PREVIEW_METADATA = 0x0400, // dataCallback\n+    // Notify on autofocus start and stop. This is useful in continuous\n+    // autofocus - FOCUS_MODE_CONTINUOUS_VIDEO and FOCUS_MODE_CONTINUOUS_PICTURE.\n+    CAMERA_MSG_FOCUS_MOVE = 0x0800,       // notifyCallback\n+    CAMERA_MSG_ALL_MSGS = 0xFFFF\n+};\n+\n+/** cmdType in sendCommand functions */\n+enum {\n+    CAMERA_CMD_START_SMOOTH_ZOOM = 1,\n+    CAMERA_CMD_STOP_SMOOTH_ZOOM = 2,\n+\n+    /**\n+     * Set the clockwise rotation of preview display (setPreviewDisplay) in\n+     * degrees. 
This affects the preview frames and the picture displayed after\n+     * snapshot. This method is useful for portrait mode applications. Note\n+     * that preview display of front-facing cameras is flipped horizontally\n+     * before the rotation, that is, the image is reflected along the central\n+     * vertical axis of the camera sensor. So the users can see themselves as\n+     * looking into a mirror.\n+     *\n+     * This does not affect the order of byte array of\n+     * CAMERA_MSG_PREVIEW_FRAME, CAMERA_MSG_VIDEO_FRAME,\n+     * CAMERA_MSG_POSTVIEW_FRAME, CAMERA_MSG_RAW_IMAGE, or\n+     * CAMERA_MSG_COMPRESSED_IMAGE. This is allowed to be set during preview\n+     * since API level 14.\n+     */\n+    CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,\n+\n+    /**\n+     * cmdType to disable/enable shutter sound. In sendCommand passing arg1 =\n+     * 0 will disable, while passing arg1 = 1 will enable the shutter sound.\n+     */\n+    CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,\n+\n+    /* cmdType to play recording sound */\n+    CAMERA_CMD_PLAY_RECORDING_SOUND = 5,\n+\n+    /**\n+     * Start the face detection. This should be called after preview is started.\n+     * The camera will notify the listener of CAMERA_MSG_FACE and the detected\n+     * faces in the preview frame. The detected faces may be the same as the\n+     * previous ones. Apps should call CAMERA_CMD_STOP_FACE_DETECTION to stop\n+     * the face detection. This method is supported if CameraParameters\n+     * KEY_MAX_NUM_HW_DETECTED_FACES or KEY_MAX_NUM_SW_DETECTED_FACES is\n+     * bigger than 0. Hardware and software face detection should not be running\n+     * at the same time. If the face detection has started, apps should not send\n+     * this again.\n+     *\n+     * In hardware face detection mode, CameraParameters KEY_WHITE_BALANCE,\n+     * KEY_FOCUS_AREAS and KEY_METERING_AREAS have no effect.\n+     *\n+     * arg1 is the face detection type. 
It can be CAMERA_FACE_DETECTION_HW or\n+     * CAMERA_FACE_DETECTION_SW. If the type of face detection requested is not\n+     * supported, the HAL must return BAD_VALUE.\n+     */\n+    CAMERA_CMD_START_FACE_DETECTION = 6,\n+\n+    /**\n+     * Stop the face detection.\n+     */\n+    CAMERA_CMD_STOP_FACE_DETECTION = 7,\n+\n+    /**\n+     * Enable/disable focus move callback (CAMERA_MSG_FOCUS_MOVE). Passing\n+     * arg1 = 0 will disable, while passing arg1 = 1 will enable the callback.\n+     */\n+    CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG = 8,\n+\n+    /**\n+     * Ping camera service to see if camera hardware is released.\n+     *\n+     * When any camera method returns error, the client can use ping command\n+     * to see if the camera has been taken away by other clients. If the result\n+     * is NO_ERROR, it means the camera hardware is not released. If the result\n+     * is not NO_ERROR, the camera has been released and the existing client\n+     * can silently finish itself or show a dialog.\n+     */\n+    CAMERA_CMD_PING = 9,\n+\n+    /**\n+     * Configure the number of video buffers used for recording. The intended\n+     * video buffer count for recording is passed as arg1, which must be\n+     * greater than 0. This command must be sent before recording is started.\n+     * This command returns INVALID_OPERATION error if it is sent after video\n+     * recording is started, or the command is not supported at all. 
This\n+     * command also returns a BAD_VALUE error if the intended video buffer\n+     * count is non-positive or too big to be realized.\n+     */\n+    CAMERA_CMD_SET_VIDEO_BUFFER_COUNT = 10,\n+\n+    /**\n+     * Configure an explicit format to use for video recording metadata mode.\n+     * This can be used to switch the format from the\n+     * default IMPLEMENTATION_DEFINED gralloc format to some other\n+     * device-supported format, and the default dataspace from the BT_709 color\n+     * space to some other device-supported dataspace. arg1 is the HAL pixel\n+     * format, and arg2 is the HAL dataSpace. This command returns\n+     * INVALID_OPERATION error if it is sent after video recording is started,\n+     * or the command is not supported at all.\n+     *\n+     * If the gralloc format is set to a format other than\n+     * IMPLEMENTATION_DEFINED, then HALv3 devices will use gralloc usage flags\n+     * of SW_READ_OFTEN.\n+     */\n+    CAMERA_CMD_SET_VIDEO_FORMAT = 11\n+};\n+\n+/** camera fatal errors */\n+enum {\n+    CAMERA_ERROR_UNKNOWN = 1,\n+    /**\n+     * Camera was released because another client has connected to the camera.\n+     * The original client should call Camera::disconnect immediately after\n+     * getting this notification. Otherwise, the camera will be released by\n+     * camera service in a short time. The client should not call any method\n+     * (except disconnect and sending CAMERA_CMD_PING) after getting this.\n+     */\n+    CAMERA_ERROR_RELEASED = 2,\n+    CAMERA_ERROR_SERVER_DIED = 100\n+};\n+\n+enum {\n+    /** The facing of the camera is opposite to that of the screen. */\n+    CAMERA_FACING_BACK = 0,\n+    /** The facing of the camera is the same as that of the screen. */\n+    CAMERA_FACING_FRONT = 1,\n+    /**\n+     * The facing of the camera is not fixed relative to the screen.\n+     * The cameras with this facing are external cameras, e.g. 
USB cameras.\n+     */\n+    CAMERA_FACING_EXTERNAL = 2\n+};\n+\n+enum {\n+    /** Hardware face detection. It does not use much CPU. */\n+    CAMERA_FACE_DETECTION_HW = 0,\n+    /**\n+     * Software face detection. It uses some CPU. Applications must use\n+     * Camera.setPreviewTexture for preview in this mode.\n+     */\n+    CAMERA_FACE_DETECTION_SW = 1\n+};\n+\n+/**\n+ * The information of a face from camera face detection.\n+ */\n+typedef struct camera_face {\n+    /**\n+     * Bounds of the face [left, top, right, bottom]. (-1000, -1000) represents\n+     * the top-left of the camera field of view, and (1000, 1000) represents the\n+     * bottom-right of the field of view. The width and height cannot be 0 or\n+     * negative. This is supported by both hardware and software face detection.\n+     *\n+     * The direction is relative to the sensor orientation, that is, what the\n+     * sensor sees. The direction is not affected by the rotation or mirroring\n+     * of CAMERA_CMD_SET_DISPLAY_ORIENTATION.\n+     */\n+    int32_t rect[4];\n+\n+    /**\n+     * The confidence level of the face. The range is 1 to 100. 100 is the\n+     * highest confidence. This is supported by both hardware and software\n+     * face detection.\n+     */\n+    int32_t score;\n+\n+    /**\n+     * An unique id per face while the face is visible to the tracker. If\n+     * the face leaves the field-of-view and comes back, it will get a new\n+     * id. If the value is 0, id is not supported.\n+     */\n+    int32_t id;\n+\n+    /**\n+     * The coordinates of the center of the left eye. The range is -1000 to\n+     * 1000. -2000, -2000 if this is not supported.\n+     */\n+    int32_t left_eye[2];\n+\n+    /**\n+     * The coordinates of the center of the right eye. The range is -1000 to\n+     * 1000. -2000, -2000 if this is not supported.\n+     */\n+    int32_t right_eye[2];\n+\n+    /**\n+     * The coordinates of the center of the mouth. 
The range is -1000 to 1000.\n+     * -2000, -2000 if this is not supported.\n+     */\n+    int32_t mouth[2];\n+\n+} camera_face_t;\n+\n+/**\n+ * The metadata of the frame data.\n+ */\n+typedef struct camera_frame_metadata {\n+    /**\n+     * The number of detected faces in the frame.\n+     */\n+    int32_t number_of_faces;\n+\n+    /**\n+     * An array of the detected faces. The length is number_of_faces.\n+     */\n+    camera_face_t *faces;\n+} camera_frame_metadata_t;\n+\n+__END_DECLS\n+\n+#endif /* SYSTEM_CORE_INCLUDE_ANDROID_CAMERA_H */\ndiff --git a/include/android/system/core/include/system/graphics.h b/include/android/system/core/include/system/graphics.h\nnew file mode 100644\nindex 000000000000..afd9f7bdb32f\n--- /dev/null\n+++ b/include/android/system/core/include/system/graphics.h\n@@ -0,0 +1,763 @@\n+/*\n+ * Copyright (C) 2011 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef SYSTEM_CORE_INCLUDE_ANDROID_GRAPHICS_H\n+#define SYSTEM_CORE_INCLUDE_ANDROID_GRAPHICS_H\n+\n+#include <stdint.h>\n+\n+#ifdef __cplusplus\n+extern \"C\" {\n+#endif\n+\n+/*\n+ * If the HAL needs to create service threads to handle graphics related\n+ * tasks, these threads need to run at HAL_PRIORITY_URGENT_DISPLAY priority\n+ * if they can block the main rendering thread in any way.\n+ *\n+ * the priority of the current thread can be set with:\n+ *\n+ *      #include <sys/resource.h>\n+ *      
setpriority(PRIO_PROCESS, 0, HAL_PRIORITY_URGENT_DISPLAY);\n+ *\n+ */\n+\n+#define HAL_PRIORITY_URGENT_DISPLAY     (-8)\n+\n+/**\n+ * pixel format definitions\n+ */\n+\n+enum {\n+    /*\n+     * \"linear\" color pixel formats:\n+     *\n+     * When used with ANativeWindow, the dataSpace field describes the color\n+     * space of the buffer.\n+     *\n+     * The color space determines, for example, if the formats are linear or\n+     * gamma-corrected; or whether any special operations are performed when\n+     * reading or writing into a buffer in one of these formats.\n+     */\n+    HAL_PIXEL_FORMAT_RGBA_8888          = 1,\n+    HAL_PIXEL_FORMAT_RGBX_8888          = 2,\n+    HAL_PIXEL_FORMAT_RGB_888            = 3,\n+    HAL_PIXEL_FORMAT_RGB_565            = 4,\n+    HAL_PIXEL_FORMAT_BGRA_8888          = 5,\n+\n+    /*\n+     * 0x100 - 0x1FF\n+     *\n+     * This range is reserved for pixel formats that are specific to the HAL\n+     * implementation.  Implementations can use any value in this range to\n+     * communicate video pixel formats between their HAL modules.  These formats\n+     * must not have an alpha channel.  Additionally, an EGLimage created from a\n+     * gralloc buffer of one of these formats must be supported for use with the\n+     * GL_OES_EGL_image_external OpenGL ES extension.\n+     */\n+\n+    /*\n+     * Android YUV format:\n+     *\n+     * This format is exposed outside of the HAL to software decoders and\n+     * applications.  
EGLImageKHR must support it in conjunction with the\n+     * OES_EGL_image_external extension.\n+     *\n+     * YV12 is a 4:2:0 YCrCb planar format comprised of a WxH Y plane followed\n+     * by (W/2) x (H/2) Cr and Cb planes.\n+     *\n+     * This format assumes\n+     * - an even width\n+     * - an even height\n+     * - a horizontal stride multiple of 16 pixels\n+     * - a vertical stride equal to the height\n+     *\n+     *   y_size = stride * height\n+     *   c_stride = ALIGN(stride/2, 16)\n+     *   c_size = c_stride * height/2\n+     *   size = y_size + c_size * 2\n+     *   cr_offset = y_size\n+     *   cb_offset = y_size + c_size\n+     *\n+     * When used with ANativeWindow, the dataSpace field describes the color\n+     * space of the buffer.\n+     */\n+    HAL_PIXEL_FORMAT_YV12   = 0x32315659, // YCrCb 4:2:0 Planar\n+\n+\n+    /*\n+     * Android Y8 format:\n+     *\n+     * This format is exposed outside of the HAL to the framework.\n+     * The expected gralloc usage flags are SW_* and HW_CAMERA_*,\n+     * and no other HW_ flags will be used.\n+     *\n+     * Y8 is a YUV planar format comprised of a WxH Y plane,\n+     * with each pixel being represented by 8 bits.\n+     *\n+     * It is equivalent to just the Y plane from YV12.\n+     *\n+     * This format assumes\n+     * - an even width\n+     * - an even height\n+     * - a horizontal stride multiple of 16 pixels\n+     * - a vertical stride equal to the height\n+     *\n+     *   size = stride * height\n+     *\n+     * When used with ANativeWindow, the dataSpace field describes the color\n+     * space of the buffer.\n+     */\n+    HAL_PIXEL_FORMAT_Y8     = 0x20203859,\n+\n+    /*\n+     * Android Y16 format:\n+     *\n+     * This format is exposed outside of the HAL to the framework.\n+     * The expected gralloc usage flags are SW_* and HW_CAMERA_*,\n+     * and no other HW_ flags will be used.\n+     *\n+     * Y16 is a YUV planar format comprised of a WxH Y plane,\n+     * 
with each pixel being represented by 16 bits.\n+     *\n+     * It is just like Y8, but has double the bits per pixel (little endian).\n+     *\n+     * This format assumes\n+     * - an even width\n+     * - an even height\n+     * - a horizontal stride multiple of 16 pixels\n+     * - a vertical stride equal to the height\n+     * - strides are specified in pixels, not in bytes\n+     *\n+     *   size = stride * height * 2\n+     *\n+     * When used with ANativeWindow, the dataSpace field describes the color\n+     * space of the buffer, except that dataSpace field\n+     * HAL_DATASPACE_DEPTH indicates that this buffer contains a depth\n+     * image where each sample is a distance value measured by a depth camera,\n+     * plus an associated confidence value.\n+     */\n+    HAL_PIXEL_FORMAT_Y16    = 0x20363159,\n+\n+    /*\n+     * Android RAW sensor format:\n+     *\n+     * This format is exposed outside of the camera HAL to applications.\n+     *\n+     * RAW16 is a single-channel, 16-bit, little endian format, typically\n+     * representing raw Bayer-pattern images from an image sensor, with minimal\n+     * processing.\n+     *\n+     * The exact pixel layout of the data in the buffer is sensor-dependent, and\n+     * needs to be queried from the camera device.\n+     *\n+     * Generally, not all 16 bits are used; more common values are 10 or 12\n+     * bits. 
If not all bits are used, the lower-order bits are filled first.\n+     * All parameters to interpret the raw data (black and white points,\n+     * color space, etc) must be queried from the camera device.\n+     *\n+     * This format assumes\n+     * - an even width\n+     * - an even height\n+     * - a horizontal stride multiple of 16 pixels\n+     * - a vertical stride equal to the height\n+     * - strides are specified in pixels, not in bytes\n+     *\n+     *   size = stride * height * 2\n+     *\n+     * This format must be accepted by the gralloc module when used with the\n+     * following usage flags:\n+     *    - GRALLOC_USAGE_HW_CAMERA_*\n+     *    - GRALLOC_USAGE_SW_*\n+     *    - GRALLOC_USAGE_RENDERSCRIPT\n+     *\n+     * When used with ANativeWindow, the dataSpace should be\n+     * HAL_DATASPACE_ARBITRARY, as raw image sensor buffers require substantial\n+     * extra metadata to define.\n+     */\n+    HAL_PIXEL_FORMAT_RAW16 = 0x20,\n+\n+    /*\n+     * Android RAW10 format:\n+     *\n+     * This format is exposed outside of the camera HAL to applications.\n+     *\n+     * RAW10 is a single-channel, 10-bit per pixel, densely packed in each row,\n+     * unprocessed format, usually representing raw Bayer-pattern images coming from\n+     * an image sensor.\n+     *\n+     * In an image buffer with this format, starting from the first pixel of each\n+     * row, each 4 consecutive pixels are packed into 5 bytes (40 bits). 
Each one\n+     * of the first 4 bytes contains the top 8 bits of each pixel, The fifth byte\n+     * contains the 2 least significant bits of the 4 pixels, the exact layout data\n+     * for each 4 consecutive pixels is illustrated below (Pi[j] stands for the jth\n+     * bit of the ith pixel):\n+     *\n+     *          bit 7                                     bit 0\n+     *          =====|=====|=====|=====|=====|=====|=====|=====|\n+     * Byte 0: |P0[9]|P0[8]|P0[7]|P0[6]|P0[5]|P0[4]|P0[3]|P0[2]|\n+     *         |-----|-----|-----|-----|-----|-----|-----|-----|\n+     * Byte 1: |P1[9]|P1[8]|P1[7]|P1[6]|P1[5]|P1[4]|P1[3]|P1[2]|\n+     *         |-----|-----|-----|-----|-----|-----|-----|-----|\n+     * Byte 2: |P2[9]|P2[8]|P2[7]|P2[6]|P2[5]|P2[4]|P2[3]|P2[2]|\n+     *         |-----|-----|-----|-----|-----|-----|-----|-----|\n+     * Byte 3: |P3[9]|P3[8]|P3[7]|P3[6]|P3[5]|P3[4]|P3[3]|P3[2]|\n+     *         |-----|-----|-----|-----|-----|-----|-----|-----|\n+     * Byte 4: |P3[1]|P3[0]|P2[1]|P2[0]|P1[1]|P1[0]|P0[1]|P0[0]|\n+     *          ===============================================\n+     *\n+     * This format assumes\n+     * - a width multiple of 4 pixels\n+     * - an even height\n+     * - a vertical stride equal to the height\n+     * - strides are specified in bytes, not in pixels\n+     *\n+     *   size = stride * height\n+     *\n+     * When stride is equal to width * (10 / 8), there will be no padding bytes at\n+     * the end of each row, the entire image data is densely packed. 
When stride is\n+     * larger than width * (10 / 8), padding bytes will be present at the end of each\n+     * row (including the last row).\n+     *\n+     * This format must be accepted by the gralloc module when used with the\n+     * following usage flags:\n+     *    - GRALLOC_USAGE_HW_CAMERA_*\n+     *    - GRALLOC_USAGE_SW_*\n+     *    - GRALLOC_USAGE_RENDERSCRIPT\n+     *\n+     * When used with ANativeWindow, the dataSpace field should be\n+     * HAL_DATASPACE_ARBITRARY, as raw image sensor buffers require substantial\n+     * extra metadata to define.\n+     */\n+    HAL_PIXEL_FORMAT_RAW10 = 0x25,\n+\n+    /*\n+     * Android RAW12 format:\n+     *\n+     * This format is exposed outside of camera HAL to applications.\n+     *\n+     * RAW12 is a single-channel, 12-bit per pixel, densely packed in each row,\n+     * unprocessed format, usually representing raw Bayer-pattern images coming from\n+     * an image sensor.\n+     *\n+     * In an image buffer with this format, starting from the first pixel of each\n+     * row, each two consecutive pixels are packed into 3 bytes (24 bits). The first\n+     * and second byte contains the top 8 bits of first and second pixel. 
The third\n+     * byte contains the 4 least significant bits of the two pixels, the exact layout\n+     * data for each two consecutive pixels is illustrated below (Pi[j] stands for\n+     * the jth bit of the ith pixel):\n+     *\n+     *           bit 7                                            bit 0\n+     *          ======|======|======|======|======|======|======|======|\n+     * Byte 0: |P0[11]|P0[10]|P0[ 9]|P0[ 8]|P0[ 7]|P0[ 6]|P0[ 5]|P0[ 4]|\n+     *         |------|------|------|------|------|------|------|------|\n+     * Byte 1: |P1[11]|P1[10]|P1[ 9]|P1[ 8]|P1[ 7]|P1[ 6]|P1[ 5]|P1[ 4]|\n+     *         |------|------|------|------|------|------|------|------|\n+     * Byte 2: |P1[ 3]|P1[ 2]|P1[ 1]|P1[ 0]|P0[ 3]|P0[ 2]|P0[ 1]|P0[ 0]|\n+     *          =======================================================\n+     *\n+     * This format assumes:\n+     * - a width multiple of 4 pixels\n+     * - an even height\n+     * - a vertical stride equal to the height\n+     * - strides are specified in bytes, not in pixels\n+     *\n+     *   size = stride * height\n+     *\n+     * When stride is equal to width * (12 / 8), there will be no padding bytes at\n+     * the end of each row, the entire image data is densely packed. 
When stride is\n+     * larger than width * (12 / 8), padding bytes will be present at the end of\n+     * each row (including the last row).\n+     *\n+     * This format must be accepted by the gralloc module when used with the\n+     * following usage flags:\n+     *    - GRALLOC_USAGE_HW_CAMERA_*\n+     *    - GRALLOC_USAGE_SW_*\n+     *    - GRALLOC_USAGE_RENDERSCRIPT\n+     *\n+     * When used with ANativeWindow, the dataSpace field should be\n+     * HAL_DATASPACE_ARBITRARY, as raw image sensor buffers require substantial\n+     * extra metadata to define.\n+     */\n+    HAL_PIXEL_FORMAT_RAW12 = 0x26,\n+\n+    /*\n+     * Android opaque RAW format:\n+     *\n+     * This format is exposed outside of the camera HAL to applications.\n+     *\n+     * RAW_OPAQUE is a format for unprocessed raw image buffers coming from an\n+     * image sensor. The actual structure of buffers of this format is\n+     * implementation-dependent.\n+     *\n+     * This format must be accepted by the gralloc module when used with the\n+     * following usage flags:\n+     *    - GRALLOC_USAGE_HW_CAMERA_*\n+     *    - GRALLOC_USAGE_SW_*\n+     *    - GRALLOC_USAGE_RENDERSCRIPT\n+     *\n+     * When used with ANativeWindow, the dataSpace field should be\n+     * HAL_DATASPACE_ARBITRARY, as raw image sensor buffers require substantial\n+     * extra metadata to define.\n+     */\n+    HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24,\n+\n+    /*\n+     * Android binary blob graphics buffer format:\n+     *\n+     * This format is used to carry task-specific data which does not have a\n+     * standard image structure. 
The details of the format are left to the two\n+     * endpoints.\n+     *\n+     * A typical use case is for transporting JPEG-compressed images from the\n+     * Camera HAL to the framework or to applications.\n+     *\n+     * Buffers of this format must have a height of 1, and width equal to their\n+     * size in bytes.\n+     *\n+     * When used with ANativeWindow, the mapping of the dataSpace field to\n+     * buffer contents for BLOB is as follows:\n+     *\n+     *  dataSpace value               | Buffer contents\n+     * -------------------------------+-----------------------------------------\n+     *  HAL_DATASPACE_JFIF            | An encoded JPEG image\n+     *  HAL_DATASPACE_DEPTH           | An android_depth_points buffer\n+     *  Other                         | Unsupported\n+     *\n+     */\n+    HAL_PIXEL_FORMAT_BLOB = 0x21,\n+\n+    /*\n+     * Android format indicating that the choice of format is entirely up to the\n+     * device-specific Gralloc implementation.\n+     *\n+     * The Gralloc implementation should examine the usage bits passed in when\n+     * allocating a buffer with this format, and it should derive the pixel\n+     * format from those usage flags.  This format will never be used with any\n+     * of the GRALLOC_USAGE_SW_* usage flags.\n+     *\n+     * If a buffer of this format is to be used as an OpenGL ES texture, the\n+     * framework will assume that sampling the texture will always return an\n+     * alpha value of 1.0 (i.e. the buffer contains only opaque pixel values).\n+     *\n+     * When used with ANativeWindow, the dataSpace field describes the color\n+     * space of the buffer.\n+     */\n+    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22,\n+\n+    /*\n+     * Android flexible YCbCr 4:2:0 formats\n+     *\n+     * This format allows platforms to use an efficient YCbCr/YCrCb 4:2:0\n+     * buffer layout, while still describing the general format in a\n+     * layout-independent manner.  
While called YCbCr, it can be\n+     * used to describe formats with either chromatic ordering, as well as\n+     * whole planar or semiplanar layouts.\n+     *\n+     * struct android_ycbcr (below) is the the struct used to describe it.\n+     *\n+     * This format must be accepted by the gralloc module when\n+     * USAGE_SW_WRITE_* or USAGE_SW_READ_* are set.\n+     *\n+     * This format is locked for use by gralloc's (*lock_ycbcr) method, and\n+     * locking with the (*lock) method will return an error.\n+     *\n+     * When used with ANativeWindow, the dataSpace field describes the color\n+     * space of the buffer.\n+     */\n+    HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23,\n+\n+    /*\n+     * Android flexible YCbCr 4:2:2 formats\n+     *\n+     * This format allows platforms to use an efficient YCbCr/YCrCb 4:2:2\n+     * buffer layout, while still describing the general format in a\n+     * layout-independent manner.  While called YCbCr, it can be\n+     * used to describe formats with either chromatic ordering, as well as\n+     * whole planar or semiplanar layouts.\n+     *\n+     * This format is currently only used by SW readable buffers\n+     * produced by MediaCodecs, so the gralloc module can ignore this format.\n+     */\n+    HAL_PIXEL_FORMAT_YCbCr_422_888 = 0x27,\n+\n+    /*\n+     * Android flexible YCbCr 4:4:4 formats\n+     *\n+     * This format allows platforms to use an efficient YCbCr/YCrCb 4:4:4\n+     * buffer layout, while still describing the general format in a\n+     * layout-independent manner.  
While called YCbCr, it can be\n+     * used to describe formats with either chromatic ordering, as well as\n+     * whole planar or semiplanar layouts.\n+     *\n+     * This format is currently only used by SW readable buffers\n+     * produced by MediaCodecs, so the gralloc module can ignore this format.\n+     */\n+    HAL_PIXEL_FORMAT_YCbCr_444_888 = 0x28,\n+\n+    /*\n+     * Android flexible RGB 888 formats\n+     *\n+     * This format allows platforms to use an efficient RGB/BGR/RGBX/BGRX\n+     * buffer layout, while still describing the general format in a\n+     * layout-independent manner.  While called RGB, it can be\n+     * used to describe formats with either color ordering and optional\n+     * padding, as well as whole planar layout.\n+     *\n+     * This format is currently only used by SW readable buffers\n+     * produced by MediaCodecs, so the gralloc module can ignore this format.\n+     */\n+    HAL_PIXEL_FORMAT_FLEX_RGB_888 = 0x29,\n+\n+    /*\n+     * Android flexible RGBA 8888 formats\n+     *\n+     * This format allows platforms to use an efficient RGBA/BGRA/ARGB/ABGR\n+     * buffer layout, while still describing the general format in a\n+     * layout-independent manner.  
While called RGBA, it can be\n+     * used to describe formats with any of the component orderings, as\n+     * well as whole planar layout.\n+     *\n+     * This format is currently only used by SW readable buffers\n+     * produced by MediaCodecs, so the gralloc module can ignore this format.\n+     */\n+    HAL_PIXEL_FORMAT_FLEX_RGBA_8888 = 0x2A,\n+\n+    /* Legacy formats (deprecated), used by ImageFormat.java */\n+    HAL_PIXEL_FORMAT_YCbCr_422_SP       = 0x10, // NV16\n+    HAL_PIXEL_FORMAT_YCrCb_420_SP       = 0x11, // NV21\n+    HAL_PIXEL_FORMAT_YCbCr_422_I        = 0x14, // YUY2\n+};\n+\n+/*\n+ * Structure for describing YCbCr formats for consumption by applications.\n+ * This is used with HAL_PIXEL_FORMAT_YCbCr_*_888.\n+ *\n+ * Buffer chroma subsampling is defined in the format.\n+ * e.g. HAL_PIXEL_FORMAT_YCbCr_420_888 has subsampling 4:2:0.\n+ *\n+ * Buffers must have a 8 bit depth.\n+ *\n+ * @y, @cb, and @cr point to the first byte of their respective planes.\n+ *\n+ * Stride describes the distance in bytes from the first value of one row of\n+ * the image to the first value of the next row.  It includes the width of the\n+ * image plus padding.\n+ * @ystride is the stride of the luma plane.\n+ * @cstride is the stride of the chroma planes.\n+ *\n+ * @chroma_step is the distance in bytes from one chroma pixel value to the\n+ * next.  
This is 2 bytes for semiplanar (because chroma values are interleaved\n+ * and each chroma value is one byte) and 1 for planar.\n+ */\n+\n+struct android_ycbcr {\n+    void *y;\n+    void *cb;\n+    void *cr;\n+    size_t ystride;\n+    size_t cstride;\n+    size_t chroma_step;\n+\n+    /** reserved for future use, set to 0 by gralloc's (*lock_ycbcr)() */\n+    uint32_t reserved[8];\n+};\n+\n+/**\n+ * Structure used to define depth point clouds for format HAL_PIXEL_FORMAT_BLOB\n+ * with dataSpace value of HAL_DATASPACE_DEPTH.\n+ * When locking a native buffer of the above format and dataSpace value,\n+ * the vaddr pointer can be cast to this structure.\n+ *\n+ * A variable-length list of (x,y,z, confidence) 3D points, as floats.  (x, y,\n+ * z) represents a measured point's position, with the coordinate system defined\n+ * by the data source.  Confidence represents the estimated likelihood that this\n+ * measurement is correct. It is between 0.f and 1.f, inclusive, with 1.f ==\n+ * 100% confidence.\n+ *\n+ * @num_points is the number of points in the list\n+ *\n+ * @xyz_points is the flexible array of floating-point values.\n+ *   It contains (num_points) * 4 floats.\n+ *\n+ *   For example:\n+ *     android_depth_points d = get_depth_buffer();\n+ *     struct {\n+ *       float x; float y; float z; float confidence;\n+ *     } firstPoint, lastPoint;\n+ *\n+ *     firstPoint.x = d.xyzc_points[0];\n+ *     firstPoint.y = d.xyzc_points[1];\n+ *     firstPoint.z = d.xyzc_points[2];\n+ *     firstPoint.confidence = d.xyzc_points[3];\n+ *     lastPoint.x = d.xyzc_points[(d.num_points - 1) * 4 + 0];\n+ *     lastPoint.y = d.xyzc_points[(d.num_points - 1) * 4 + 1];\n+ *     lastPoint.z = d.xyzc_points[(d.num_points - 1) * 4 + 2];\n+ *     lastPoint.confidence = d.xyzc_points[(d.num_points - 1) * 4 + 3];\n+ */\n+\n+struct android_depth_points {\n+    uint32_t num_points;\n+\n+    /** reserved for future use, set to 0 by gralloc's (*lock)() */\n+    uint32_t 
reserved[8];\n+\n+    float xyzc_points[];\n+};\n+\n+/**\n+ * Transformation definitions\n+ *\n+ * IMPORTANT NOTE:\n+ * HAL_TRANSFORM_ROT_90 is applied CLOCKWISE and AFTER HAL_TRANSFORM_FLIP_{H|V}.\n+ *\n+ */\n+\n+enum {\n+    /* flip source image horizontally (around the vertical axis) */\n+    HAL_TRANSFORM_FLIP_H    = 0x01,\n+    /* flip source image vertically (around the horizontal axis)*/\n+    HAL_TRANSFORM_FLIP_V    = 0x02,\n+    /* rotate source image 90 degrees clockwise */\n+    HAL_TRANSFORM_ROT_90    = 0x04,\n+    /* rotate source image 180 degrees */\n+    HAL_TRANSFORM_ROT_180   = 0x03,\n+    /* rotate source image 270 degrees clockwise */\n+    HAL_TRANSFORM_ROT_270   = 0x07,\n+    /* don't use. see system/window.h */\n+    HAL_TRANSFORM_RESERVED  = 0x08,\n+};\n+\n+/**\n+ * Dataspace Definitions\n+ * ======================\n+ *\n+ * Dataspace is the definition of how pixel values should be interpreted.\n+ *\n+ * For many formats, this is the colorspace of the image data, which includes\n+ * primaries (including white point) and the transfer characteristic function,\n+ * which describes both gamma curve and numeric range (within the bit depth).\n+ *\n+ * Other dataspaces include depth measurement data from a depth camera.\n+ */\n+\n+typedef enum android_dataspace {\n+    /*\n+     * Default-assumption data space, when not explicitly specified.\n+     *\n+     * It is safest to assume the buffer is an image with sRGB primaries and\n+     * encoding ranges, but the consumer and/or the producer of the data may\n+     * simply be using defaults. No automatic gamma transform should be\n+     * expected, except for a possible display gamma transform when drawn to a\n+     * screen.\n+     */\n+    HAL_DATASPACE_UNKNOWN = 0x0,\n+\n+    /*\n+     * Arbitrary dataspace with manually defined characteristics.  
Definition\n+     * for colorspaces or other meaning must be communicated separately.\n+     *\n+     * This is used when specifying primaries, transfer characteristics,\n+     * etc. separately.\n+     *\n+     * A typical use case is in video encoding parameters (e.g. for H.264),\n+     * where a colorspace can have separately defined primaries, transfer\n+     * characteristics, etc.\n+     */\n+    HAL_DATASPACE_ARBITRARY = 0x1,\n+\n+    /*\n+     * RGB Colorspaces\n+     * -----------------\n+     *\n+     * Primaries are given using (x,y) coordinates in the CIE 1931 definition\n+     * of x and y specified by ISO 11664-1.\n+     *\n+     * Transfer characteristics are the opto-electronic transfer characteristic\n+     * at the source as a function of linear optical intensity (luminance).\n+     */\n+\n+    /*\n+     * sRGB linear encoding:\n+     *\n+     * The red, green, and blue components are stored in sRGB space, but\n+     * are linear, not gamma-encoded.\n+     * The RGB primaries and the white point are the same as BT.709.\n+     *\n+     * The values are encoded using the full range ([0,255] for 8-bit) for all\n+     * components.\n+     */\n+    HAL_DATASPACE_SRGB_LINEAR = 0x200,\n+\n+    /*\n+     * sRGB gamma encoding:\n+     *\n+     * The red, green and blue components are stored in sRGB space, and\n+     * converted to linear space when read, using the standard sRGB to linear\n+     * equation:\n+     *\n+     * Clinear = Csrgb / 12.92                  for Csrgb <= 0.04045\n+     *         = (Csrgb + 0.055 / 1.055)^2.4    for Csrgb >  0.04045\n+     *\n+     * When written the inverse transformation is performed:\n+     *\n+     * Csrgb = 12.92 * Clinear                  for Clinear <= 0.0031308\n+     *       = 1.055 * Clinear^(1/2.4) - 0.055  for Clinear >  0.0031308\n+     *\n+     *\n+     * The alpha component, if present, is always stored in linear space and\n+     * is left unmodified when read or written.\n+     *\n+     * The RGB 
primaries and the white point are the same as BT.709.\n+     *\n+     * The values are encoded using the full range ([0,255] for 8-bit) for all\n+     * components.\n+     *\n+     */\n+    HAL_DATASPACE_SRGB = 0x201,\n+\n+    /*\n+     * YCbCr Colorspaces\n+     * -----------------\n+     *\n+     * Primaries are given using (x,y) coordinates in the CIE 1931 definition\n+     * of x and y specified by ISO 11664-1.\n+     *\n+     * Transfer characteristics are the opto-electronic transfer characteristic\n+     * at the source as a function of linear optical intensity (luminance).\n+     */\n+\n+    /*\n+     * JPEG File Interchange Format (JFIF)\n+     *\n+     * Same model as BT.601-625, but all values (Y, Cb, Cr) range from 0 to 255\n+     *\n+     * Transfer characteristic curve:\n+     *  E = 1.099 * L ^ 0.45 - 0.099, 1.00 >= L >= 0.018\n+     *  E = 4.500 L, 0.018 > L >= 0\n+     *      L - luminance of image 0 <= L <= 1 for conventional colorimetry\n+     *      E - corresponding electrical signal\n+     *\n+     * Primaries:       x       y\n+     *  green           0.290   0.600\n+     *  blue            0.150   0.060\n+     *  red             0.640   0.330\n+     *  white (D65)     0.3127  0.3290\n+     */\n+    HAL_DATASPACE_JFIF = 0x101,\n+\n+    /*\n+     * ITU-R Recommendation 601 (BT.601) - 625-line\n+     *\n+     * Standard-definition television, 625 Lines (PAL)\n+     *\n+     * For 8-bit-depth formats:\n+     * Luma (Y) samples should range from 16 to 235, inclusive\n+     * Chroma (Cb, Cr) samples should range from 16 to 240, inclusive\n+     *\n+     * For 10-bit-depth formats:\n+     * Luma (Y) samples should range from 64 to 940, inclusive\n+     * Chroma (Cb, Cr) samples should range from 64 to 960, inclusive\n+     *\n+     * Transfer characteristic curve:\n+     *  E = 1.099 * L ^ 0.45 - 0.099, 1.00 >= L >= 0.018\n+     *  E = 4.500 L, 0.018 > L >= 0\n+     *      L - luminance of image 0 <= L <= 1 for conventional colorimetry\n+     *     
 E - corresponding electrical signal\n+     *\n+     * Primaries:       x       y\n+     *  green           0.290   0.600\n+     *  blue            0.150   0.060\n+     *  red             0.640   0.330\n+     *  white (D65)     0.3127  0.3290\n+     */\n+    HAL_DATASPACE_BT601_625 = 0x102,\n+\n+    /*\n+     * ITU-R Recommendation 601 (BT.601) - 525-line\n+     *\n+     * Standard-definition television, 525 Lines (NTSC)\n+     *\n+     * For 8-bit-depth formats:\n+     * Luma (Y) samples should range from 16 to 235, inclusive\n+     * Chroma (Cb, Cr) samples should range from 16 to 240, inclusive\n+     *\n+     * For 10-bit-depth formats:\n+     * Luma (Y) samples should range from 64 to 940, inclusive\n+     * Chroma (Cb, Cr) samples should range from 64 to 960, inclusive\n+     *\n+     * Transfer characteristic curve:\n+     *  E = 1.099 * L ^ 0.45 - 0.099, 1.00 >= L >= 0.018\n+     *  E = 4.500 L, 0.018 > L >= 0\n+     *      L - luminance of image 0 <= L <= 1 for conventional colorimetry\n+     *      E - corresponding electrical signal\n+     *\n+     * Primaries:       x       y\n+     *  green           0.310   0.595\n+     *  blue            0.155   0.070\n+     *  red             0.630   0.340\n+     *  white (D65)     0.3127  0.3290\n+     */\n+    HAL_DATASPACE_BT601_525 = 0x103,\n+\n+    /*\n+     * ITU-R Recommendation 709 (BT.709)\n+     *\n+     * High-definition television\n+     *\n+     * For 8-bit-depth formats:\n+     * Luma (Y) samples should range from 16 to 235, inclusive\n+     * Chroma (Cb, Cr) samples should range from 16 to 240, inclusive\n+     *\n+     * For 10-bit-depth formats:\n+     * Luma (Y) samples should range from 64 to 940, inclusive\n+     * Chroma (Cb, Cr) samples should range from 64 to 960, inclusive\n+     *\n+     * Primaries:       x       y\n+     *  green           0.300   0.600\n+     *  blue            0.150   0.060\n+     *  red             0.640   0.330\n+     *  white (D65)     0.3127  0.3290\n+     */\n+    
HAL_DATASPACE_BT709 = 0x104,\n+\n+    /*\n+     * The buffer contains depth ranging measurements from a depth camera.\n+     * This value is valid with formats:\n+     *    HAL_PIXEL_FORMAT_Y16: 16-bit samples, consisting of a depth measurement\n+     *       and an associated confidence value. The 3 MSBs of the sample make\n+     *       up the confidence value, and the low 13 LSBs of the sample make up\n+     *       the depth measurement.\n+     *       For the confidence section, 0 means 100% confidence, 1 means 0%\n+     *       confidence. The mapping to a linear float confidence value between\n+     *       0.f and 1.f can be obtained with\n+     *         float confidence = (((depthSample >> 13) - 1) & 0x7) / 7.0f;\n+     *       The depth measurement can be extracted simply with\n+     *         uint16_t range = (depthSample & 0x1FFF);\n+     *    HAL_PIXEL_FORMAT_BLOB: A depth point cloud, as\n+     *       a variable-length float (x,y,z, confidence) coordinate point list.\n+     *       The point cloud will be represented with the android_depth_points\n+     *       structure.\n+     */\n+    HAL_DATASPACE_DEPTH = 0x1000\n+\n+} android_dataspace_t;\n+\n+#ifdef __cplusplus\n+}\n+#endif\n+\n+#endif /* SYSTEM_CORE_INCLUDE_ANDROID_GRAPHICS_H */\ndiff --git a/include/android/system/core/include/system/window.h b/include/android/system/core/include/system/window.h\nnew file mode 100644\nindex 000000000000..508ce00bacec\n--- /dev/null\n+++ b/include/android/system/core/include/system/window.h\n@@ -0,0 +1,954 @@\n+/*\n+ * Copyright (C) 2011 The Android Open Source Project\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ *      http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" 
BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+\n+#ifndef SYSTEM_CORE_INCLUDE_ANDROID_WINDOW_H\n+#define SYSTEM_CORE_INCLUDE_ANDROID_WINDOW_H\n+\n+#include <cutils/native_handle.h>\n+#include <errno.h>\n+#include <limits.h>\n+#include <stdint.h>\n+#include <string.h>\n+#include <sys/cdefs.h>\n+#include <system/graphics.h>\n+#include <unistd.h>\n+\n+#ifndef __UNUSED\n+#define __UNUSED __attribute__((__unused__))\n+#endif\n+#ifndef __deprecated\n+#define __deprecated __attribute__((__deprecated__))\n+#endif\n+\n+__BEGIN_DECLS\n+\n+/*****************************************************************************/\n+\n+#define ANDROID_NATIVE_MAKE_CONSTANT(a,b,c,d) \\\n+    (((unsigned)(a)<<24)|((unsigned)(b)<<16)|((unsigned)(c)<<8)|(unsigned)(d))\n+\n+#define ANDROID_NATIVE_WINDOW_MAGIC \\\n+    ANDROID_NATIVE_MAKE_CONSTANT('_','w','n','d')\n+\n+#define ANDROID_NATIVE_BUFFER_MAGIC \\\n+    ANDROID_NATIVE_MAKE_CONSTANT('_','b','f','r')\n+\n+// ---------------------------------------------------------------------------\n+\n+// This #define may be used to conditionally compile device-specific code to\n+// support either the prior ANativeWindow interface, which did not pass libsync\n+// fences around, or the new interface that does.  
This #define is only present\n+// when the ANativeWindow interface does include libsync support.\n+#define ANDROID_NATIVE_WINDOW_HAS_SYNC 1\n+\n+// ---------------------------------------------------------------------------\n+\n+typedef const native_handle_t* buffer_handle_t;\n+\n+// ---------------------------------------------------------------------------\n+\n+typedef struct android_native_rect_t\n+{\n+    int32_t left;\n+    int32_t top;\n+    int32_t right;\n+    int32_t bottom;\n+} android_native_rect_t;\n+\n+// ---------------------------------------------------------------------------\n+\n+typedef struct android_native_base_t\n+{\n+    /* a magic value defined by the actual EGL native type */\n+    int magic;\n+\n+    /* the sizeof() of the actual EGL native type */\n+    int version;\n+\n+    void* reserved[4];\n+\n+    /* reference-counting interface */\n+    void (*incRef)(struct android_native_base_t* base);\n+    void (*decRef)(struct android_native_base_t* base);\n+} android_native_base_t;\n+\n+typedef struct ANativeWindowBuffer\n+{\n+#ifdef __cplusplus\n+    ANativeWindowBuffer() {\n+        common.magic = ANDROID_NATIVE_BUFFER_MAGIC;\n+        common.version = sizeof(ANativeWindowBuffer);\n+        memset(common.reserved, 0, sizeof(common.reserved));\n+    }\n+\n+    // Implement the methods that sp<ANativeWindowBuffer> expects so that it\n+    // can be used to automatically refcount ANativeWindowBuffer's.\n+    void incStrong(const void* /*id*/) const {\n+        common.incRef(const_cast<android_native_base_t*>(&common));\n+    }\n+    void decStrong(const void* /*id*/) const {\n+        common.decRef(const_cast<android_native_base_t*>(&common));\n+    }\n+#endif\n+\n+    struct android_native_base_t common;\n+\n+    int width;\n+    int height;\n+    int stride;\n+    int format;\n+    int usage;\n+\n+    void* reserved[2];\n+\n+    buffer_handle_t handle;\n+\n+    void* reserved_proc[8];\n+} ANativeWindowBuffer_t;\n+\n+// Old typedef for 
backwards compatibility.\n+typedef ANativeWindowBuffer_t android_native_buffer_t;\n+\n+// ---------------------------------------------------------------------------\n+\n+/* attributes queriable with query() */\n+enum {\n+    NATIVE_WINDOW_WIDTH     = 0,\n+    NATIVE_WINDOW_HEIGHT    = 1,\n+    NATIVE_WINDOW_FORMAT    = 2,\n+\n+    /* The minimum number of buffers that must remain un-dequeued after a buffer\n+     * has been queued.  This value applies only if set_buffer_count was used to\n+     * override the number of buffers and if a buffer has since been queued.\n+     * Users of the set_buffer_count ANativeWindow method should query this\n+     * value before calling set_buffer_count.  If it is necessary to have N\n+     * buffers simultaneously dequeued as part of the steady-state operation,\n+     * and this query returns M then N+M buffers should be requested via\n+     * native_window_set_buffer_count.\n+     *\n+     * Note that this value does NOT apply until a single buffer has been\n+     * queued.  In particular this means that it is possible to:\n+     *\n+     * 1. Query M = min undequeued buffers\n+     * 2. Set the buffer count to N + M\n+     * 3. Dequeue all N + M buffers\n+     * 4. Cancel M buffers\n+     * 5. Queue, dequeue, queue, dequeue, ad infinitum\n+     */\n+    NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS = 3,\n+\n+    /* Check whether queueBuffer operations on the ANativeWindow send the buffer\n+     * to the window compositor.  The query sets the returned 'value' argument\n+     * to 1 if the ANativeWindow DOES send queued buffers directly to the window\n+     * compositor and 0 if the buffers do not go directly to the window\n+     * compositor.\n+     *\n+     * This can be used to determine whether protected buffer content should be\n+     * sent to the ANativeWindow.  Note, however, that a result of 1 does NOT\n+     * indicate that queued buffers will be protected from applications or users\n+     * capturing their contents.  
If that behavior is desired then some other\n+     * mechanism (e.g. the GRALLOC_USAGE_PROTECTED flag) should be used in\n+     * conjunction with this query.\n+     */\n+    NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER = 4,\n+\n+    /* Get the concrete type of a ANativeWindow.  See below for the list of\n+     * possible return values.\n+     *\n+     * This query should not be used outside the Android framework and will\n+     * likely be removed in the near future.\n+     */\n+    NATIVE_WINDOW_CONCRETE_TYPE = 5,\n+\n+\n+    /*\n+     * Default width and height of ANativeWindow buffers, these are the\n+     * dimensions of the window buffers irrespective of the\n+     * NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS call and match the native window\n+     * size unless overridden by NATIVE_WINDOW_SET_BUFFERS_USER_DIMENSIONS.\n+     */\n+    NATIVE_WINDOW_DEFAULT_WIDTH = 6,\n+    NATIVE_WINDOW_DEFAULT_HEIGHT = 7,\n+\n+    /*\n+     * transformation that will most-likely be applied to buffers. This is only\n+     * a hint, the actual transformation applied might be different.\n+     *\n+     * INTENDED USE:\n+     *\n+     * The transform hint can be used by a producer, for instance the GLES\n+     * driver, to pre-rotate the rendering such that the final transformation\n+     * in the composer is identity. This can be very useful when used in\n+     * conjunction with the h/w composer HAL, in situations where it\n+     * cannot handle arbitrary rotations.\n+     *\n+     * 1. Before dequeuing a buffer, the GL driver (or any other ANW client)\n+     *    queries the ANW for NATIVE_WINDOW_TRANSFORM_HINT.\n+     *\n+     * 2. The GL driver overrides the width and height of the ANW to\n+     *    account for NATIVE_WINDOW_TRANSFORM_HINT. This is done by querying\n+     *    NATIVE_WINDOW_DEFAULT_{WIDTH | HEIGHT}, swapping the dimensions\n+     *    according to NATIVE_WINDOW_TRANSFORM_HINT and calling\n+     *    native_window_set_buffers_dimensions().\n+     *\n+     * 3. 
The GL driver dequeues a buffer of the new pre-rotated size.\n+     *\n+     * 4. The GL driver renders to the buffer such that the image is\n+     *    already transformed, that is applying NATIVE_WINDOW_TRANSFORM_HINT\n+     *    to the rendering.\n+     *\n+     * 5. The GL driver calls native_window_set_transform to apply\n+     *    inverse transformation to the buffer it just rendered.\n+     *    In order to do this, the GL driver needs\n+     *    to calculate the inverse of NATIVE_WINDOW_TRANSFORM_HINT, this is\n+     *    done easily:\n+     *\n+     *        int hintTransform, inverseTransform;\n+     *        query(..., NATIVE_WINDOW_TRANSFORM_HINT, &hintTransform);\n+     *        inverseTransform = hintTransform;\n+     *        if (hintTransform & HAL_TRANSFORM_ROT_90)\n+     *            inverseTransform ^= HAL_TRANSFORM_ROT_180;\n+     *\n+     *\n+     * 6. The GL driver queues the pre-transformed buffer.\n+     *\n+     * 7. The composer combines the buffer transform with the display\n+     *    transform.  If the buffer transform happens to cancel out the\n+     *    display transform then no rotation is needed.\n+     *\n+     */\n+    NATIVE_WINDOW_TRANSFORM_HINT = 8,\n+\n+    /*\n+     * Boolean that indicates whether the consumer is running more than\n+     * one buffer behind the producer.\n+     */\n+    NATIVE_WINDOW_CONSUMER_RUNNING_BEHIND = 9,\n+\n+    /*\n+     * The consumer gralloc usage bits currently set by the consumer.\n+     * The values are defined in hardware/libhardware/include/gralloc.h.\n+     */\n+    NATIVE_WINDOW_CONSUMER_USAGE_BITS = 10,\n+\n+    /**\n+     * Transformation that will by applied to buffers by the hwcomposer.\n+     * This must not be set or checked by producer endpoints, and will\n+     * disable the transform hint set in SurfaceFlinger (see\n+     * NATIVE_WINDOW_TRANSFORM_HINT).\n+     *\n+     * INTENDED USE:\n+     * Temporary - Please do not use this.  
This is intended only to be used\n+     * by the camera's LEGACY mode.\n+     *\n+     * In situations where a SurfaceFlinger client wishes to set a transform\n+     * that is not visible to the producer, and will always be applied in the\n+     * hardware composer, the client can set this flag with\n+     * native_window_set_buffers_sticky_transform.  This can be used to rotate\n+     * and flip buffers consumed by hardware composer without actually changing\n+     * the aspect ratio of the buffers produced.\n+     */\n+    NATIVE_WINDOW_STICKY_TRANSFORM = 11,\n+\n+    /**\n+     * The default data space for the buffers as set by the consumer.\n+     * The values are defined in graphics.h.\n+     */\n+    NATIVE_WINDOW_DEFAULT_DATASPACE = 12,\n+\n+    /*\n+     * Returns the age of the contents of the most recently dequeued buffer as\n+     * the number of frames that have elapsed since it was last queued. For\n+     * example, if the window is double-buffered, the age of any given buffer in\n+     * steady state will be 2. 
If the dequeued buffer has never been queued, its\n+     * age will be 0.\n+     */\n+    NATIVE_WINDOW_BUFFER_AGE = 13,\n+};\n+\n+/* Valid operations for the (*perform)() hook.\n+ *\n+ * Values marked as 'deprecated' are supported, but have been superceded by\n+ * other functionality.\n+ *\n+ * Values marked as 'private' should be considered private to the framework.\n+ * HAL implementation code with access to an ANativeWindow should not use these,\n+ * as it may not interact properly with the framework's use of the\n+ * ANativeWindow.\n+ */\n+enum {\n+    NATIVE_WINDOW_SET_USAGE                 =  0,\n+    NATIVE_WINDOW_CONNECT                   =  1,   /* deprecated */\n+    NATIVE_WINDOW_DISCONNECT                =  2,   /* deprecated */\n+    NATIVE_WINDOW_SET_CROP                  =  3,   /* private */\n+    NATIVE_WINDOW_SET_BUFFER_COUNT          =  4,\n+    NATIVE_WINDOW_SET_BUFFERS_GEOMETRY      =  5,   /* deprecated */\n+    NATIVE_WINDOW_SET_BUFFERS_TRANSFORM     =  6,\n+    NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP     =  7,\n+    NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS    =  8,\n+    NATIVE_WINDOW_SET_BUFFERS_FORMAT        =  9,\n+    NATIVE_WINDOW_SET_SCALING_MODE          = 10,   /* private */\n+    NATIVE_WINDOW_LOCK                      = 11,   /* private */\n+    NATIVE_WINDOW_UNLOCK_AND_POST           = 12,   /* private */\n+    NATIVE_WINDOW_API_CONNECT               = 13,   /* private */\n+    NATIVE_WINDOW_API_DISCONNECT            = 14,   /* private */\n+    NATIVE_WINDOW_SET_BUFFERS_USER_DIMENSIONS = 15, /* private */\n+    NATIVE_WINDOW_SET_POST_TRANSFORM_CROP   = 16,   /* private */\n+    NATIVE_WINDOW_SET_BUFFERS_STICKY_TRANSFORM = 17,/* private */\n+    NATIVE_WINDOW_SET_SIDEBAND_STREAM       = 18,\n+    NATIVE_WINDOW_SET_BUFFERS_DATASPACE     = 19,\n+    NATIVE_WINDOW_SET_SURFACE_DAMAGE        = 20,   /* private */\n+};\n+\n+/* parameter for NATIVE_WINDOW_[API_][DIS]CONNECT */\n+enum {\n+    /* Buffers will be queued by EGL via eglSwapBuffers 
after being filled using\n+     * OpenGL ES.\n+     */\n+    NATIVE_WINDOW_API_EGL = 1,\n+\n+    /* Buffers will be queued after being filled using the CPU\n+     */\n+    NATIVE_WINDOW_API_CPU = 2,\n+\n+    /* Buffers will be queued by Stagefright after being filled by a video\n+     * decoder.  The video decoder can either be a software or hardware decoder.\n+     */\n+    NATIVE_WINDOW_API_MEDIA = 3,\n+\n+    /* Buffers will be queued by the the camera HAL.\n+     */\n+    NATIVE_WINDOW_API_CAMERA = 4,\n+};\n+\n+/* parameter for NATIVE_WINDOW_SET_BUFFERS_TRANSFORM */\n+enum {\n+    /* flip source image horizontally */\n+    NATIVE_WINDOW_TRANSFORM_FLIP_H = HAL_TRANSFORM_FLIP_H ,\n+    /* flip source image vertically */\n+    NATIVE_WINDOW_TRANSFORM_FLIP_V = HAL_TRANSFORM_FLIP_V,\n+    /* rotate source image 90 degrees clock-wise, and is applied after TRANSFORM_FLIP_{H|V} */\n+    NATIVE_WINDOW_TRANSFORM_ROT_90 = HAL_TRANSFORM_ROT_90,\n+    /* rotate source image 180 degrees */\n+    NATIVE_WINDOW_TRANSFORM_ROT_180 = HAL_TRANSFORM_ROT_180,\n+    /* rotate source image 270 degrees clock-wise */\n+    NATIVE_WINDOW_TRANSFORM_ROT_270 = HAL_TRANSFORM_ROT_270,\n+    /* transforms source by the inverse transform of the screen it is displayed onto. 
This\n+     * transform is applied last */\n+    NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY = 0x08\n+};\n+\n+/* parameter for NATIVE_WINDOW_SET_SCALING_MODE */\n+enum {\n+    /* the window content is not updated (frozen) until a buffer of\n+     * the window size is received (enqueued)\n+     */\n+    NATIVE_WINDOW_SCALING_MODE_FREEZE           = 0,\n+    /* the buffer is scaled in both dimensions to match the window size */\n+    NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW  = 1,\n+    /* the buffer is scaled uniformly such that the smaller dimension\n+     * of the buffer matches the window size (cropping in the process)\n+     */\n+    NATIVE_WINDOW_SCALING_MODE_SCALE_CROP       = 2,\n+    /* the window is clipped to the size of the buffer's crop rectangle; pixels\n+     * outside the crop rectangle are treated as if they are completely\n+     * transparent.\n+     */\n+    NATIVE_WINDOW_SCALING_MODE_NO_SCALE_CROP    = 3,\n+};\n+\n+/* values returned by the NATIVE_WINDOW_CONCRETE_TYPE query */\n+enum {\n+    NATIVE_WINDOW_FRAMEBUFFER               = 0, /* FramebufferNativeWindow */\n+    NATIVE_WINDOW_SURFACE                   = 1, /* Surface */\n+};\n+\n+/* parameter for NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP\n+ *\n+ * Special timestamp value to indicate that timestamps should be auto-generated\n+ * by the native window when queueBuffer is called.  
This is equal to INT64_MIN,\n+ * defined directly to avoid problems with C99/C++ inclusion of stdint.h.\n+ */\n+static const int64_t NATIVE_WINDOW_TIMESTAMP_AUTO = (-9223372036854775807LL-1);\n+\n+struct ANativeWindow\n+{\n+#ifdef __cplusplus\n+    ANativeWindow()\n+        : flags(0), minSwapInterval(0), maxSwapInterval(0), xdpi(0), ydpi(0)\n+    {\n+        common.magic = ANDROID_NATIVE_WINDOW_MAGIC;\n+        common.version = sizeof(ANativeWindow);\n+        memset(common.reserved, 0, sizeof(common.reserved));\n+    }\n+\n+    /* Implement the methods that sp<ANativeWindow> expects so that it\n+       can be used to automatically refcount ANativeWindow's. */\n+    void incStrong(const void* /*id*/) const {\n+        common.incRef(const_cast<android_native_base_t*>(&common));\n+    }\n+    void decStrong(const void* /*id*/) const {\n+        common.decRef(const_cast<android_native_base_t*>(&common));\n+    }\n+#endif\n+\n+    struct android_native_base_t common;\n+\n+    /* flags describing some attributes of this surface or its updater */\n+    const uint32_t flags;\n+\n+    /* min swap interval supported by this updated */\n+    const int   minSwapInterval;\n+\n+    /* max swap interval supported by this updated */\n+    const int   maxSwapInterval;\n+\n+    /* horizontal and vertical resolution in DPI */\n+    const float xdpi;\n+    const float ydpi;\n+\n+    /* Some storage reserved for the OEM's driver. */\n+    intptr_t    oem[4];\n+\n+    /*\n+     * Set the swap interval for this surface.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int     (*setSwapInterval)(struct ANativeWindow* window,\n+                int interval);\n+\n+    /*\n+     * Hook called by EGL to acquire a buffer. After this call, the buffer\n+     * is not locked, so its content cannot be modified. 
This call may block if\n+     * no buffers are available.\n+     *\n+     * The window holds a reference to the buffer between dequeueBuffer and\n+     * either queueBuffer or cancelBuffer, so clients only need their own\n+     * reference if they might use the buffer after queueing or canceling it.\n+     * Holding a reference to a buffer after queueing or canceling it is only\n+     * allowed if a specific buffer count has been set.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     *\n+     * XXX: This function is deprecated.  It will continue to work for some\n+     * time for binary compatibility, but the new dequeueBuffer function that\n+     * outputs a fence file descriptor should be used in its place.\n+     */\n+    int     (*dequeueBuffer_DEPRECATED)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer** buffer);\n+\n+    /*\n+     * hook called by EGL to lock a buffer. This MUST be called before modifying\n+     * the content of a buffer. The buffer must have been acquired with\n+     * dequeueBuffer first.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     *\n+     * XXX: This function is deprecated.  
It will continue to work for some\n+     * time for binary compatibility, but it is essentially a no-op, and calls\n+     * to it should be removed.\n+     */\n+    int     (*lockBuffer_DEPRECATED)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer* buffer);\n+\n+    /*\n+     * Hook called by EGL when modifications to the render buffer are done.\n+     * This unlocks and post the buffer.\n+     *\n+     * The window holds a reference to the buffer between dequeueBuffer and\n+     * either queueBuffer or cancelBuffer, so clients only need their own\n+     * reference if they might use the buffer after queueing or canceling it.\n+     * Holding a reference to a buffer after queueing or canceling it is only\n+     * allowed if a specific buffer count has been set.\n+     *\n+     * Buffers MUST be queued in the same order than they were dequeued.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     *\n+     * XXX: This function is deprecated.  It will continue to work for some\n+     * time for binary compatibility, but the new queueBuffer function that\n+     * takes a fence file descriptor should be used in its place (pass a value\n+     * of -1 for the fence file descriptor if there is no valid one to pass).\n+     */\n+    int     (*queueBuffer_DEPRECATED)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer* buffer);\n+\n+    /*\n+     * hook used to retrieve information about the native window.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int     (*query)(const struct ANativeWindow* window,\n+                int what, int* value);\n+\n+    /*\n+     * hook used to perform various operations on the surface.\n+     * (*perform)() is a generic mechanism to add functionality to\n+     * ANativeWindow while keeping backward binary compatibility.\n+     *\n+     * DO NOT CALL THIS HOOK DIRECTLY.  
Instead, use the helper functions\n+     * defined below.\n+     *\n+     * (*perform)() returns -ENOENT if the 'what' parameter is not supported\n+     * by the surface's implementation.\n+     *\n+     * See above for a list of valid operations, such as\n+     * NATIVE_WINDOW_SET_USAGE or NATIVE_WINDOW_CONNECT\n+     */\n+    int     (*perform)(struct ANativeWindow* window,\n+                int operation, ... );\n+\n+    /*\n+     * Hook used to cancel a buffer that has been dequeued.\n+     * No synchronization is performed between dequeue() and cancel(), so\n+     * either external synchronization is needed, or these functions must be\n+     * called from the same thread.\n+     *\n+     * The window holds a reference to the buffer between dequeueBuffer and\n+     * either queueBuffer or cancelBuffer, so clients only need their own\n+     * reference if they might use the buffer after queueing or canceling it.\n+     * Holding a reference to a buffer after queueing or canceling it is only\n+     * allowed if a specific buffer count has been set.\n+     *\n+     * XXX: This function is deprecated.  It will continue to work for some\n+     * time for binary compatibility, but the new cancelBuffer function that\n+     * takes a fence file descriptor should be used in its place (pass a value\n+     * of -1 for the fence file descriptor if there is no valid one to pass).\n+     */\n+    int     (*cancelBuffer_DEPRECATED)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer* buffer);\n+\n+    /*\n+     * Hook called by EGL to acquire a buffer. 
This call may block if no\n+     * buffers are available.\n+     *\n+     * The window holds a reference to the buffer between dequeueBuffer and\n+     * either queueBuffer or cancelBuffer, so clients only need their own\n+     * reference if they might use the buffer after queueing or canceling it.\n+     * Holding a reference to a buffer after queueing or canceling it is only\n+     * allowed if a specific buffer count has been set.\n+     *\n+     * The libsync fence file descriptor returned in the int pointed to by the\n+     * fenceFd argument will refer to the fence that must signal before the\n+     * dequeued buffer may be written to.  A value of -1 indicates that the\n+     * caller may access the buffer immediately without waiting on a fence.  If\n+     * a valid file descriptor is returned (i.e. any value except -1) then the\n+     * caller is responsible for closing the file descriptor.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int     (*dequeueBuffer)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer** buffer, int* fenceFd);\n+\n+    /*\n+     * Hook called by EGL when modifications to the render buffer are done.\n+     * This unlocks and post the buffer.\n+     *\n+     * The window holds a reference to the buffer between dequeueBuffer and\n+     * either queueBuffer or cancelBuffer, so clients only need their own\n+     * reference if they might use the buffer after queueing or canceling it.\n+     * Holding a reference to a buffer after queueing or canceling it is only\n+     * allowed if a specific buffer count has been set.\n+     *\n+     * The fenceFd argument specifies a libsync fence file descriptor for a\n+     * fence that must signal before the buffer can be accessed.  If the buffer\n+     * can be accessed immediately then a value of -1 should be used.  
The\n+     * caller must not use the file descriptor after it is passed to\n+     * queueBuffer, and the ANativeWindow implementation is responsible for\n+     * closing it.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int     (*queueBuffer)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer* buffer, int fenceFd);\n+\n+    /*\n+     * Hook used to cancel a buffer that has been dequeued.\n+     * No synchronization is performed between dequeue() and cancel(), so\n+     * either external synchronization is needed, or these functions must be\n+     * called from the same thread.\n+     *\n+     * The window holds a reference to the buffer between dequeueBuffer and\n+     * either queueBuffer or cancelBuffer, so clients only need their own\n+     * reference if they might use the buffer after queueing or canceling it.\n+     * Holding a reference to a buffer after queueing or canceling it is only\n+     * allowed if a specific buffer count has been set.\n+     *\n+     * The fenceFd argument specifies a libsync fence file decsriptor for a\n+     * fence that must signal before the buffer can be accessed.  If the buffer\n+     * can be accessed immediately then a value of -1 should be used.\n+     *\n+     * Note that if the client has not waited on the fence that was returned\n+     * from dequeueBuffer, that same fence should be passed to cancelBuffer to\n+     * ensure that future uses of the buffer are preceded by a wait on that\n+     * fence.  
The caller must not use the file descriptor after it is passed\n+     * to cancelBuffer, and the ANativeWindow implementation is responsible for\n+     * closing it.\n+     *\n+     * Returns 0 on success or -errno on error.\n+     */\n+    int     (*cancelBuffer)(struct ANativeWindow* window,\n+                struct ANativeWindowBuffer* buffer, int fenceFd);\n+};\n+\n+ /* Backwards compatibility: use ANativeWindow (struct ANativeWindow in C).\n+  * android_native_window_t is deprecated.\n+  */\n+typedef struct ANativeWindow ANativeWindow;\n+typedef struct ANativeWindow android_native_window_t __deprecated;\n+\n+/*\n+ *  native_window_set_usage(..., usage)\n+ *  Sets the intended usage flags for the next buffers\n+ *  acquired with (*lockBuffer)() and on.\n+ *  By default (if this function is never called), a usage of\n+ *      GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE\n+ *  is assumed.\n+ *  Calling this function will usually cause following buffers to be\n+ *  reallocated.\n+ */\n+\n+static inline int native_window_set_usage(\n+        struct ANativeWindow* window, int usage)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_USAGE, usage);\n+}\n+\n+/* deprecated. Always returns 0. Don't call. */\n+static inline int native_window_connect(\n+        struct ANativeWindow* window __UNUSED, int api __UNUSED) __deprecated;\n+\n+static inline int native_window_connect(\n+        struct ANativeWindow* window __UNUSED, int api __UNUSED) {\n+    return 0;\n+}\n+\n+/* deprecated. Always returns 0. Don't call. 
*/\n+static inline int native_window_disconnect(\n+        struct ANativeWindow* window __UNUSED, int api __UNUSED) __deprecated;\n+\n+static inline int native_window_disconnect(\n+        struct ANativeWindow* window __UNUSED, int api __UNUSED) {\n+    return 0;\n+}\n+\n+/*\n+ * native_window_set_crop(..., crop)\n+ * Sets which region of the next queued buffers needs to be considered.\n+ * Depending on the scaling mode, a buffer's crop region is scaled and/or\n+ * cropped to match the surface's size.  This function sets the crop in\n+ * pre-transformed buffer pixel coordinates.\n+ *\n+ * The specified crop region applies to all buffers queued after it is called.\n+ *\n+ * If 'crop' is NULL, subsequently queued buffers won't be cropped.\n+ *\n+ * An error is returned if for instance the crop region is invalid, out of the\n+ * buffer's bound or if the window is invalid.\n+ */\n+static inline int native_window_set_crop(\n+        struct ANativeWindow* window,\n+        android_native_rect_t const * crop)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_CROP, crop);\n+}\n+\n+/*\n+ * native_window_set_post_transform_crop(..., crop)\n+ * Sets which region of the next queued buffers needs to be considered.\n+ * Depending on the scaling mode, a buffer's crop region is scaled and/or\n+ * cropped to match the surface's size.  
This function sets the crop in\n+ * post-transformed pixel coordinates.\n+ *\n+ * The specified crop region applies to all buffers queued after it is called.\n+ *\n+ * If 'crop' is NULL, subsequently queued buffers won't be cropped.\n+ *\n+ * An error is returned if for instance the crop region is invalid, out of the\n+ * buffer's bound or if the window is invalid.\n+ */\n+static inline int native_window_set_post_transform_crop(\n+        struct ANativeWindow* window,\n+        android_native_rect_t const * crop)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_POST_TRANSFORM_CROP, crop);\n+}\n+\n+/*\n+ * native_window_set_active_rect(..., active_rect)\n+ *\n+ * This function is deprecated and will be removed soon.  For now it simply\n+ * sets the post-transform crop for compatibility while multi-project commits\n+ * get checked.\n+ */\n+static inline int native_window_set_active_rect(\n+        struct ANativeWindow* window,\n+        android_native_rect_t const * active_rect) __deprecated;\n+\n+static inline int native_window_set_active_rect(\n+        struct ANativeWindow* window,\n+        android_native_rect_t const * active_rect)\n+{\n+    return native_window_set_post_transform_crop(window, active_rect);\n+}\n+\n+/*\n+ * native_window_set_buffer_count(..., count)\n+ * Sets the number of buffers associated with this native window.\n+ */\n+static inline int native_window_set_buffer_count(\n+        struct ANativeWindow* window,\n+        size_t bufferCount)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFER_COUNT, bufferCount);\n+}\n+\n+/*\n+ * native_window_set_buffers_geometry(..., int w, int h, int format)\n+ * All buffers dequeued after this call will have the dimensions and format\n+ * specified.  A successful call to this function has the same effect as calling\n+ * native_window_set_buffers_size and native_window_set_buffers_format.\n+ *\n+ * XXX: This function is deprecated.  
The native_window_set_buffers_dimensions\n+ * and native_window_set_buffers_format functions should be used instead.\n+ */\n+static inline int native_window_set_buffers_geometry(\n+        struct ANativeWindow* window,\n+        int w, int h, int format) __deprecated;\n+\n+static inline int native_window_set_buffers_geometry(\n+        struct ANativeWindow* window,\n+        int w, int h, int format)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_GEOMETRY,\n+            w, h, format);\n+}\n+\n+/*\n+ * native_window_set_buffers_dimensions(..., int w, int h)\n+ * All buffers dequeued after this call will have the dimensions specified.\n+ * In particular, all buffers will have a fixed-size, independent from the\n+ * native-window size. They will be scaled according to the scaling mode\n+ * (see native_window_set_scaling_mode) upon window composition.\n+ *\n+ * If w and h are 0, the normal behavior is restored. That is, dequeued buffers\n+ * following this call will be sized to match the window's size.\n+ *\n+ * Calling this function will reset the window crop to a NULL value, which\n+ * disables cropping of the buffers.\n+ */\n+static inline int native_window_set_buffers_dimensions(\n+        struct ANativeWindow* window,\n+        int w, int h)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS,\n+            w, h);\n+}\n+\n+/*\n+ * native_window_set_buffers_user_dimensions(..., int w, int h)\n+ *\n+ * Sets the user buffer size for the window, which overrides the\n+ * window's size.  All buffers dequeued after this call will have the\n+ * dimensions specified unless overridden by\n+ * native_window_set_buffers_dimensions.  All buffers will have a\n+ * fixed-size, independent from the native-window size. They will be\n+ * scaled according to the scaling mode (see\n+ * native_window_set_scaling_mode) upon window composition.\n+ *\n+ * If w and h are 0, the normal behavior is restored. 
That is, the\n+ * default buffer size will match the windows's size.\n+ *\n+ * Calling this function will reset the window crop to a NULL value, which\n+ * disables cropping of the buffers.\n+ */\n+static inline int native_window_set_buffers_user_dimensions(\n+        struct ANativeWindow* window,\n+        int w, int h)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_USER_DIMENSIONS,\n+            w, h);\n+}\n+\n+/*\n+ * native_window_set_buffers_format(..., int format)\n+ * All buffers dequeued after this call will have the format specified.\n+ *\n+ * If the specified format is 0, the default buffer format will be used.\n+ */\n+static inline int native_window_set_buffers_format(\n+        struct ANativeWindow* window,\n+        int format)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_FORMAT, format);\n+}\n+\n+/*\n+ * native_window_set_buffers_data_space(..., int dataSpace)\n+ * All buffers queued after this call will be associated with the dataSpace\n+ * parameter specified.\n+ *\n+ * dataSpace specifies additional information about the buffer that's dependent\n+ * on the buffer format and the endpoints. For example, it can be used to convey\n+ * the color space of the image data in the buffer, or it can be used to\n+ * indicate that the buffers contain depth measurement data instead of color\n+ * images.  
The default dataSpace is 0, HAL_DATASPACE_UNKNOWN, unless it has been\n+ * overridden by the consumer.\n+ */\n+static inline int native_window_set_buffers_data_space(\n+        struct ANativeWindow* window,\n+        android_dataspace_t dataSpace)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_DATASPACE,\n+            dataSpace);\n+}\n+\n+/*\n+ * native_window_set_buffers_transform(..., int transform)\n+ * All buffers queued after this call will be displayed transformed according\n+ * to the transform parameter specified.\n+ */\n+static inline int native_window_set_buffers_transform(\n+        struct ANativeWindow* window,\n+        int transform)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_TRANSFORM,\n+            transform);\n+}\n+\n+/*\n+ * native_window_set_buffers_sticky_transform(..., int transform)\n+ * All buffers queued after this call will be displayed transformed according\n+ * to the transform parameter specified applied on top of the regular buffer\n+ * transform.  Setting this transform will disable the transform hint.\n+ *\n+ * Temporary - This is only intended to be used by the LEGACY camera mode, do\n+ *   not use this for anything else.\n+ */\n+static inline int native_window_set_buffers_sticky_transform(\n+        struct ANativeWindow* window,\n+        int transform)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_STICKY_TRANSFORM,\n+            transform);\n+}\n+\n+/*\n+ * native_window_set_buffers_timestamp(..., int64_t timestamp)\n+ * All buffers queued after this call will be associated with the timestamp\n+ * parameter specified. If the timestamp is set to NATIVE_WINDOW_TIMESTAMP_AUTO\n+ * (the default), timestamps will be generated automatically when queueBuffer is\n+ * called. The timestamp is measured in nanoseconds, and is normally monotonically\n+ * increasing. 
The timestamp should be unaffected by time-of-day adjustments,\n+ * and for a camera should be strictly monotonic but for a media player may be\n+ * reset when the position is set.\n+ */\n+static inline int native_window_set_buffers_timestamp(\n+        struct ANativeWindow* window,\n+        int64_t timestamp)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP,\n+            timestamp);\n+}\n+\n+/*\n+ * native_window_set_scaling_mode(..., int mode)\n+ * All buffers queued after this call will be associated with the scaling mode\n+ * specified.\n+ */\n+static inline int native_window_set_scaling_mode(\n+        struct ANativeWindow* window,\n+        int mode)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_SCALING_MODE,\n+            mode);\n+}\n+\n+/*\n+ * native_window_api_connect(..., int api)\n+ * connects an API to this window. only one API can be connected at a time.\n+ * Returns -EINVAL if for some reason the window cannot be connected, which\n+ * can happen if it's connected to some other API.\n+ */\n+static inline int native_window_api_connect(\n+        struct ANativeWindow* window, int api)\n+{\n+    return window->perform(window, NATIVE_WINDOW_API_CONNECT, api);\n+}\n+\n+/*\n+ * native_window_api_disconnect(..., int api)\n+ * disconnect the API from this window.\n+ * An error is returned if for instance the window wasn't connected in the\n+ * first place.\n+ */\n+static inline int native_window_api_disconnect(\n+        struct ANativeWindow* window, int api)\n+{\n+    return window->perform(window, NATIVE_WINDOW_API_DISCONNECT, api);\n+}\n+\n+/*\n+ * native_window_dequeue_buffer_and_wait(...)\n+ * Dequeue a buffer and wait on the fence associated with that buffer.  The\n+ * buffer may safely be accessed immediately upon this function returning.  
An\n+ * error is returned if either of the dequeue or the wait operations fail.\n+ */\n+static inline int native_window_dequeue_buffer_and_wait(ANativeWindow *anw,\n+        struct ANativeWindowBuffer** anb) {\n+    return anw->dequeueBuffer_DEPRECATED(anw, anb);\n+}\n+\n+/*\n+ * native_window_set_sideband_stream(..., native_handle_t*)\n+ * Attach a sideband buffer stream to a native window.\n+ */\n+static inline int native_window_set_sideband_stream(\n+        struct ANativeWindow* window,\n+        native_handle_t* sidebandHandle)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_SIDEBAND_STREAM,\n+            sidebandHandle);\n+}\n+\n+/*\n+ * native_window_set_surface_damage(..., android_native_rect_t* rects, int numRects)\n+ * Set the surface damage (i.e., the region of the surface that has changed\n+ * since the previous frame). The damage set by this call will be reset (to the\n+ * default of full-surface damage) after calling queue, so this must be called\n+ * prior to every frame with damage that does not cover the whole surface if the\n+ * caller desires downstream consumers to use this optimization.\n+ *\n+ * The damage region is specified as an array of rectangles, with the important\n+ * caveat that the origin of the surface is considered to be the bottom-left\n+ * corner, as in OpenGL ES.\n+ *\n+ * If numRects is set to 0, rects may be NULL, and the surface damage will be\n+ * set to the full surface (the same as if this function had not been called for\n+ * this frame).\n+ */\n+static inline int native_window_set_surface_damage(\n+        struct ANativeWindow* window,\n+        const android_native_rect_t* rects, size_t numRects)\n+{\n+    return window->perform(window, NATIVE_WINDOW_SET_SURFACE_DAMAGE,\n+            rects, numRects);\n+}\n+\n+__END_DECLS\n+\n+#endif /* SYSTEM_CORE_INCLUDE_ANDROID_WINDOW_H */\ndiff --git a/meson.build b/meson.build\nindex 13d0605f903c..2575d0abb334 100644\n--- a/meson.build\n+++ b/meson.build\n@@ -51,6 +51,11 
@@ add_project_arguments(cpp_arguments, language : 'cpp')\n add_project_link_arguments(cpp_arguments, language : 'cpp')\n \n libcamera_includes = include_directories('include')\n+android_includes = ([\n+    include_directories('include/android/hardware/libhardware/include/'),\n+    include_directories('include/android/system/core/include'),\n+    include_directories('include/android/metadata/'),\n+])\n \n subdir('include')\n subdir('src')\n",
    "prefixes": [
        "libcamera-devel",
        "3/5"
    ]
}