/*
 * Copyright 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef HDR_PLUS_TYPES_H
#define HDR_PLUS_TYPES_H

#include <array>
#include <stdint.h>
#include <string>
#include <vector>

namespace pbcamera {

// This file defines the common types used in the HDR+ client and HDR+ service API.

typedef int32_t status_t;

/*
 * ImageConfiguration and PlaneConfiguration define the layout of a buffer.
 * The following is an example of an NV21 buffer.
 *
 * <-------Y stride (in bytes)------->
 * <----width (in pixels)---->
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  ^            ^
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  height       Y scanline
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  (in lines)   (in lines)
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  v            |
 * . . . . . . . . . . . . . . . . . .               |
 * . . . . . . . . . . . . . . . . . .               v
 * <------V/U stride (in bytes)------>
 * V U V U V U V U V U V U V U . . . .  ^
 * V U V U V U V U V U V U V U . . . .  |
 * V U V U V U V U V U V U V U . . . .  |
 * V U V U V U V U V U V U V U . . . .  V/U scanline
 * V U V U V U V U V U V U V U . . . .  (in lines)
 * . . . . . . . . . . . . . . . . . .  |
 * . . . . . . . . . . . . . . . . . .  v
 * . . . . . . . . . . . . . . . . . .  -> Image padding.
 */

// PlaneConfiguration defines the configuration of an image plane.
struct PlaneConfiguration {
    // Number of bytes in each line including padding.
    uint32_t stride;
    // Number of lines vertically including padding.
    uint32_t scanline;

    PlaneConfiguration() : stride(0), scanline(0) {}

    bool operator==(const PlaneConfiguration &other) const {
        return stride == other.stride &&
               scanline == other.scanline;
    }

    bool operator!=(const PlaneConfiguration &other) const {
        return !(*this == other);
    }
};

// ImageConfiguration defines an image configuration.
struct ImageConfiguration {
    // Image width.
    uint32_t width;
    // Image height.
    uint32_t height;
    // Image format.
    int format;
    // Configuration for each plane.
    std::vector<PlaneConfiguration> planes;
    // Number of padded bytes after the last plane.
    uint32_t padding;

    ImageConfiguration() : width(0), height(0), format(0), padding(0) {}

    bool operator==(const ImageConfiguration &other) const {
        return width == other.width &&
               height == other.height &&
               format == other.format &&
               planes == other.planes &&
               padding == other.padding;
    }

    bool operator!=(const ImageConfiguration &other) const {
        return !(*this == other);
    }
};

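/*
 * Illustrative sketch (not part of the API): how the NV21 layout pictured above might be
 * described with PlaneConfiguration and ImageConfiguration. All dimensions, strides, and
 * the format value below are made-up example numbers, not values mandated by the API.
 *
 *   ImageConfiguration image;
 *   image.width = 4032;                    // width in pixels
 *   image.height = 3024;                   // height in lines
 *   image.format = 0x11;                   // e.g. HAL_PIXEL_FORMAT_YCrCb_420_SP (NV21)
 *
 *   PlaneConfiguration yPlane;
 *   yPlane.stride = 4096;                  // Y stride in bytes, including padding
 *   yPlane.scanline = 3072;                // Y scanlines, including padding
 *
 *   PlaneConfiguration vuPlane;            // interleaved V/U plane at half vertical resolution
 *   vuPlane.stride = 4096;                 // V/U stride in bytes, including padding
 *   vuPlane.scanline = 1536;               // V/U scanlines, including padding
 *
 *   image.planes = { yPlane, vuPlane };
 *   image.padding = 0;                     // no extra bytes after the last plane
 *
 *   // The buffer size implied by this layout is the sum of stride * scanline over all
 *   // planes, plus the trailing padding.
 */
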
/*
 * StreamConfiguration defines a stream's configuration, such as its image buffer
 * resolution, and is used during stream configuration.
 */
struct StreamConfiguration {
    /*
     * Unique ID of the stream. Each stream must have a unique ID so it can be used to identify
     * the output stream of a StreamBuffer in a CaptureRequest.
     */
    uint32_t id;

    // Image configuration.
    ImageConfiguration image;

    bool operator==(const StreamConfiguration &other) const {
        return id == other.id &&
               image == other.image;
    }

    bool operator!=(const StreamConfiguration &other) const {
        return !(*this == other);
    }
};

/*
 * SensorMode contains the sensor mode information.
 */
struct SensorMode {
    // Camera ID: usually 0 is the back camera and 1 is the front camera.
    uint32_t cameraId;

    // Pixel array resolution.
    uint32_t pixelArrayWidth;
    uint32_t pixelArrayHeight;

    // Active array resolution.
    uint32_t activeArrayWidth;
    uint32_t activeArrayHeight;

    // Sensor output pixel clock.
    uint32_t outputPixelClkHz;

    // Sensor timestamp offset due to gyro calibration. When comparing timestamps between AP and
    // Easel, this offset should be subtracted from the AP timestamp.
    int64_t timestampOffsetNs;

    // Sensor timestamp offset due to sensor cropping. When comparing timestamps between AP and
    // Easel, this offset should be subtracted from the AP timestamp.
    int64_t timestampCropOffsetNs;

    // Sensor output format as defined in android_pixel_format.
    int format;

    SensorMode() : cameraId(0), pixelArrayWidth(0), pixelArrayHeight(0), activeArrayWidth(0),
                   activeArrayHeight(0), outputPixelClkHz(0), timestampOffsetNs(0),
                   timestampCropOffsetNs(0), format(0) {}
};

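/*
 * Illustrative sketch (not part of the API): applying the timestamp offsets above when
 * comparing an AP sensor timestamp with an Easel timestamp. The variable names are made up
 * for the example.
 *
 *   // apTimestampNs is android.sensor.timestamp reported by the AP for a frame;
 *   // easelTimestampNs is the Easel-side timestamp for the same frame.
 *   int64_t adjustedApTimestampNs = apTimestampNs
 *           - sensorMode.timestampOffsetNs
 *           - sensorMode.timestampCropOffsetNs;
 *   // adjustedApTimestampNs is now comparable with easelTimestampNs, e.g. for matching frames.
 */
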
/*
 * InputConfiguration defines the input configuration for the HDR+ service.
 */
struct InputConfiguration {
    // Whether the input frames come from the sensor MIPI or from the AP. If true, the HDR+
    // service will get input frames from the sensor and sensorMode contains the sensor mode
    // information. If false, the HDR+ service will get input frames from the AP and streamConfig
    // contains the input stream configuration.
    bool isSensorInput;
    // Sensor mode if isSensorInput is true.
    SensorMode sensorMode;
    // Input stream configuration if isSensorInput is false.
    StreamConfiguration streamConfig;

    InputConfiguration() : isSensorInput(false) {}
};

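/*
 * Illustrative sketch (not part of the API): the two ways an InputConfiguration may be
 * filled in. The field values are made-up examples.
 *
 *   // Input frames come directly from the sensor over MIPI:
 *   InputConfiguration sensorInput;
 *   sensorInput.isSensorInput = true;
 *   sensorInput.sensorMode.cameraId = 0;            // e.g. back camera
 *   sensorInput.sensorMode.pixelArrayWidth = 4048;
 *   sensorInput.sensorMode.pixelArrayHeight = 3044;
 *   // ... the remaining SensorMode fields describe the configured sensor mode.
 *
 *   // Input frames are provided by the AP through an input stream:
 *   InputConfiguration apInput;
 *   apInput.isSensorInput = false;
 *   apInput.streamConfig.id = 0;                    // unique ID of the input stream
 *   apInput.streamConfig.image = rawImageConfig;    // a hypothetical ImageConfiguration
 */
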
/*
 * StreamBuffer defines a buffer in a stream.
 */
struct StreamBuffer {
    // ID of the stream that this buffer belongs to.
    uint32_t streamId;
    // DMA buffer fd for this buffer if it's an ION buffer.
    int32_t dmaBufFd;
    // Pointer to the data of this buffer.
    void* data;
    // Size of the allocated data.
    uint32_t dataSize;
};

/*
 * CaptureRequest defines a capture request that the HDR+ client sends to the HDR+ service.
 */
struct CaptureRequest {
    /*
     * ID of the capture request. Each capture request must have a unique ID. When the HDR+
     * service sends a CaptureResult to the HDR+ client for this request, CaptureResult.requestId
     * will be set to this ID.
     */
    uint32_t id;
    /*
     * Output buffers of the request. The buffers will be filled with the captured image when the
     * HDR+ service sends the output buffers in the CaptureResult.
     */
    std::vector<StreamBuffer> outputBuffers;
};

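/*
 * Illustrative sketch (not part of the API): building a CaptureRequest with one output
 * buffer. ionBufferFd, mappedAddress, bufferSize, and nextRequestId are hypothetical
 * client-side values describing memory the HDR+ client has allocated for an output stream.
 *
 *   StreamBuffer outputBuffer;
 *   outputBuffer.streamId = 1;               // ID of a previously configured output stream
 *   outputBuffer.dmaBufFd = ionBufferFd;     // DMA buffer fd if this is an ION buffer
 *   outputBuffer.data = mappedAddress;       // CPU-accessible pointer to the buffer data
 *   outputBuffer.dataSize = bufferSize;      // allocated size in bytes
 *
 *   CaptureRequest request;
 *   request.id = nextRequestId++;            // must be unique across capture requests
 *   request.outputBuffers.push_back(outputBuffer);
 */
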
// Util functions used in StaticMetadata and FrameMetadata.
namespace metadatautils {
template<typename T>
void appendValueToString(std::string *strOut, const char* key, T value);

template<typename T>
void appendVectorOrArrayToString(std::string *strOut, T values);

template<typename T>
void appendVectorOrArrayToString(std::string *strOut, const char* key, T values);

template<typename T, size_t SIZE>
void appendVectorArrayToString(std::string *strOut, const char* key,
            std::vector<std::array<T, SIZE>> values);

template<typename T, size_t SIZE>
void appendArrayArrayToString(std::string *strOut, const char* key,
            std::array<T, SIZE> values);
} // namespace metadatautils

static const uint32_t DEBUG_PARAM_NONE                      = 0u;
static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_METERING = (1u);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD  = (1u << 1);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_TEXT           = (1u << 2);
static const uint32_t DEBUG_PARAM_SAVE_PROFILE              = (1u << 3);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_IPU_WATERMARK  = (1u << 4);

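/*
 * Illustrative sketch (not part of the API): the DEBUG_PARAM_* constants are bit flags, so
 * they can be combined and tested with bitwise operators.
 *
 *   uint32_t debugParams = DEBUG_PARAM_SAVE_GCAME_INPUT_METERING |
 *                          DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD;
 *
 *   if (debugParams & DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD) {
 *       // The corresponding debug dump is enabled.
 *   }
 */
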
/*
 * StaticMetadata defines a camera device's characteristics.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusService and deserialization in
 * MessengerListenerFromHdrPlusClient should also be updated.
 */
struct StaticMetadata {
    // The following are from Android Camera Metadata.
    uint8_t flashInfoAvailable; // android.flash.info.available
    std::array<int32_t, 2> sensitivityRange; // android.sensor.info.sensitivityRange
    int32_t maxAnalogSensitivity; // android.sensor.maxAnalogSensitivity
    std::array<int32_t, 2> pixelArraySize; // android.sensor.info.pixelArraySize
    std::array<int32_t, 4> activeArraySize; // android.sensor.info.activeArraySize
    std::vector<std::array<int32_t, 4>> opticalBlackRegions; // android.sensor.opticalBlackRegions
    // android.scaler.availableStreamConfigurations
    std::vector<std::array<int32_t, 4>> availableStreamConfigurations;
    uint8_t referenceIlluminant1; // android.sensor.referenceIlluminant1
    uint8_t referenceIlluminant2; // android.sensor.referenceIlluminant2
    std::array<float, 9> calibrationTransform1; // android.sensor.calibrationTransform1
    std::array<float, 9> calibrationTransform2; // android.sensor.calibrationTransform2
    std::array<float, 9> colorTransform1; // android.sensor.colorTransform1
    std::array<float, 9> colorTransform2; // android.sensor.colorTransform2
    int32_t whiteLevel; // android.sensor.info.whiteLevel
    uint8_t colorFilterArrangement; // android.sensor.info.colorFilterArrangement
    std::vector<float> availableApertures; // android.lens.info.availableApertures
    std::vector<float> availableFocalLengths; // android.lens.info.availableFocalLengths
    std::array<int32_t, 2> shadingMapSize; // android.lens.info.shadingMapSize
    uint8_t focusDistanceCalibration; // android.lens.info.focusDistanceCalibration
    std::array<int32_t, 2> aeCompensationRange; // android.control.aeCompensationRange
    float aeCompensationStep; // android.control.aeCompensationStep

    uint32_t debugParams; // Bitmask of DEBUG_PARAM_* values.

    // Convert this static metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;

        metadatautils::appendValueToString(strOut, "flashInfoAvailable", flashInfoAvailable);
        metadatautils::appendVectorOrArrayToString(strOut, "sensitivityRange", sensitivityRange);
        metadatautils::appendValueToString(strOut, "maxAnalogSensitivity", maxAnalogSensitivity);
        metadatautils::appendVectorOrArrayToString(strOut, "pixelArraySize", pixelArraySize);
        metadatautils::appendVectorOrArrayToString(strOut, "activeArraySize", activeArraySize);
        metadatautils::appendVectorArrayToString(strOut, "opticalBlackRegions",
                opticalBlackRegions);
        metadatautils::appendVectorArrayToString(strOut, "availableStreamConfigurations",
                availableStreamConfigurations);
        metadatautils::appendValueToString(strOut, "referenceIlluminant1", referenceIlluminant1);
        metadatautils::appendValueToString(strOut, "referenceIlluminant2", referenceIlluminant2);
        metadatautils::appendVectorOrArrayToString(strOut, "calibrationTransform1",
                calibrationTransform1);
        metadatautils::appendVectorOrArrayToString(strOut, "calibrationTransform2",
                calibrationTransform2);
        metadatautils::appendVectorOrArrayToString(strOut, "colorTransform1", colorTransform1);
        metadatautils::appendVectorOrArrayToString(strOut, "colorTransform2", colorTransform2);
        metadatautils::appendValueToString(strOut, "whiteLevel", whiteLevel);
        metadatautils::appendValueToString(strOut, "colorFilterArrangement",
                colorFilterArrangement);
        metadatautils::appendVectorOrArrayToString(strOut, "availableApertures",
                availableApertures);
        metadatautils::appendVectorOrArrayToString(strOut, "availableFocalLengths",
                availableFocalLengths);
        metadatautils::appendVectorOrArrayToString(strOut, "shadingMapSize", shadingMapSize);
        metadatautils::appendValueToString(strOut, "focusDistanceCalibration",
                focusDistanceCalibration);
        metadatautils::appendVectorOrArrayToString(strOut, "aeCompensationRange",
                aeCompensationRange);
        metadatautils::appendValueToString(strOut, "aeCompensationStep",
                aeCompensationStep);
        metadatautils::appendValueToString(strOut, "debugParams", debugParams);
    }
};

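/*
 * Illustrative sketch (not part of the API): appendToString() can be used to dump the
 * metadata for logging. The log() call is a placeholder for whatever logging facility the
 * caller uses, and the values shown are made up.
 *
 *   std::string dump;
 *   staticMetadata.appendToString(&dump);
 *   // dump now contains one "key: value(s)" line per field, for example:
 *   //   flashInfoAvailable: 1
 *   //   sensitivityRange: 60, 3200
 *   log(dump);
 */
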
/*
 * FrameMetadata defines properties of a frame captured on the AP.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusService and deserialization in
 * MessengerListenerFromHdrPlusClient should also be updated.
 */
struct FrameMetadata {
    int64_t easelTimestamp; // Easel timestamp

    // The following are from Android Camera Metadata.
    int64_t exposureTime; // android.sensor.exposureTime
    int32_t sensitivity; // android.sensor.sensitivity
    int32_t postRawSensitivityBoost; // android.control.postRawSensitivityBoost
    uint8_t flashMode; // android.flash.mode
    std::array<float, 4> colorCorrectionGains; // android.colorCorrection.gains
    std::array<float, 9> colorCorrectionTransform; // android.colorCorrection.transform
    std::array<float, 3> neutralColorPoint; // android.sensor.neutralColorPoint
    int64_t timestamp; // android.sensor.timestamp
    uint8_t blackLevelLock; // android.blackLevel.lock
    uint8_t faceDetectMode; // android.statistics.faceDetectMode
    std::vector<int32_t> faceIds; // android.statistics.faceIds
    std::vector<std::array<int32_t, 6>> faceLandmarks; // android.statistics.faceLandmarks
    std::vector<std::array<int32_t, 4>> faceRectangles; // android.statistics.faceRectangles
    std::vector<uint8_t> faceScores; // android.statistics.faceScores
    uint8_t sceneFlicker; // android.statistics.sceneFlicker
    std::array<std::array<double, 2>, 4> noiseProfile; // android.sensor.noiseProfile
    std::array<float, 4> dynamicBlackLevel; // android.sensor.dynamicBlackLevel
    std::vector<float> lensShadingMap; // android.statistics.lensShadingMap
    float focusDistance; // android.lens.focusDistance
    int32_t aeExposureCompensation; // android.control.aeExposureCompensation
    uint8_t aeMode; // android.control.aeMode
    uint8_t aeLock; // android.control.aeLock
    uint8_t aeState; // android.control.aeState
    uint8_t aePrecaptureTrigger; // android.control.aePrecaptureTrigger
    std::vector<std::array<int32_t, 5>> aeRegions; // android.control.aeRegions

    // Convert this frame metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;

        metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
        metadatautils::appendValueToString(strOut, "exposureTime", exposureTime);
        metadatautils::appendValueToString(strOut, "sensitivity", sensitivity);
        metadatautils::appendValueToString(strOut, "postRawSensitivityBoost",
                postRawSensitivityBoost);
        metadatautils::appendValueToString(strOut, "flashMode", flashMode);
        metadatautils::appendVectorOrArrayToString(strOut, "colorCorrectionGains",
                colorCorrectionGains);
        metadatautils::appendVectorOrArrayToString(strOut, "colorCorrectionTransform",
                colorCorrectionTransform);
        metadatautils::appendVectorOrArrayToString(strOut, "neutralColorPoint", neutralColorPoint);
        metadatautils::appendValueToString(strOut, "timestamp", timestamp);
        metadatautils::appendValueToString(strOut, "blackLevelLock", blackLevelLock);
        metadatautils::appendValueToString(strOut, "faceDetectMode", faceDetectMode);
        metadatautils::appendVectorOrArrayToString(strOut, "faceIds", faceIds);
        metadatautils::appendVectorArrayToString(strOut, "faceLandmarks", faceLandmarks);
        metadatautils::appendVectorArrayToString(strOut, "faceRectangles", faceRectangles);
        metadatautils::appendVectorOrArrayToString(strOut, "faceScores", faceScores);
        metadatautils::appendArrayArrayToString(strOut, "noiseProfile", noiseProfile);
        metadatautils::appendValueToString(strOut, "sceneFlicker", sceneFlicker);
        metadatautils::appendVectorOrArrayToString(strOut, "dynamicBlackLevel", dynamicBlackLevel);
        metadatautils::appendVectorOrArrayToString(strOut, "lensShadingMap", lensShadingMap);
        metadatautils::appendValueToString(strOut, "focusDistance", focusDistance);
        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
        metadatautils::appendValueToString(strOut, "aeMode", aeMode);
        metadatautils::appendValueToString(strOut, "aeLock", aeLock);
        metadatautils::appendValueToString(strOut, "aeState", aeState);
        metadatautils::appendValueToString(strOut, "aePrecaptureTrigger", aePrecaptureTrigger);
        metadatautils::appendVectorArrayToString(strOut, "aeRegions", aeRegions);
    }
};

/*
 * RequestMetadata defines the properties for a capture request.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
 * MessengerListenerFromHdrPlusService should also be updated.
 */
struct RequestMetadata {
    std::array<int32_t, 4> cropRegion; // android.scaler.cropRegion (x_min, y_min, width, height)
    int32_t aeExposureCompensation; // android.control.aeExposureCompensation

    bool postviewEnable; // com.google.nexus.experimental2017.stats.postview_enable
    bool continuousCapturing; // Whether to keep capturing RAW while HDR+ processing is in progress.

    // Convert this request metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendVectorOrArrayToString(strOut, "cropRegion", cropRegion);
        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
        metadatautils::appendValueToString(strOut, "postviewEnable", postviewEnable);
        metadatautils::appendValueToString(strOut, "continuousCapturing", continuousCapturing);
    }
};

/*
 * ResultMetadata defines a processed frame's properties that have been modified due to processing.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
 * MessengerListenerFromHdrPlusService should also be updated.
 */
struct ResultMetadata {
    int64_t easelTimestamp; // Easel timestamp of the start of frame (SOF) of the base frame.
    int64_t timestamp; // android.sensor.timestamp. AP timestamp of exposure start of the base
                       // frame.
    std::string makernote; // Obfuscated capture information.

    // Convert this result metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
        metadatautils::appendValueToString(strOut, "timestamp", timestamp);
        metadatautils::appendValueToString(strOut, "makernote", makernote.size());
    }
};

/*
 * CaptureResult defines a capture result that the HDR+ service returns to the HDR+ client.
 */
struct CaptureResult {
    /*
     * ID of the CaptureRequest that this capture result corresponds to. It can be used to match
     * the original CaptureRequest when the HDR+ client receives this result.
     */
    uint32_t requestId;
    /*
     * Output buffers filled with the processed frame by the HDR+ service.
     */
    std::vector<StreamBuffer> outputBuffers;

    /*
     * Result metadata, including properties modified due to processing.
     */
    ResultMetadata metadata;
};

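/*
 * Illustrative sketch (not part of the API): matching a CaptureResult back to its
 * CaptureRequest on the client side. pendingRequests and handleProcessedFrame are
 * hypothetical client-side pieces; pendingRequests is a map keyed by CaptureRequest::id.
 *
 *   void onCaptureResult(const CaptureResult &result) {
 *       auto it = pendingRequests.find(result.requestId);
 *       if (it == pendingRequests.end()) return;   // unknown or already completed request
 *
 *       // result.outputBuffers are the request's buffers, now filled with the processed
 *       // frame; result.metadata describes how processing modified the frame.
 *       handleProcessedFrame(result.outputBuffers, result.metadata);
 *       pendingRequests.erase(it);
 *   }
 */
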
// Util functions used in StaticMetadata and FrameMetadata.
namespace metadatautils {

/*
 * Append a key and a value to a string.
 *
 * strOut is the string to append a key and a value to.
 * key is the name of the data.
 * value is the value of the data.
 */
template<typename T>
void appendValueToString(std::string *strOut, const char* key, T value) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": " + std::to_string(value) + "\n";
}

/*
 * Append a vector or an array of values to a string.
 *
 * strOut is the string to append values to.
 * values is a vector or an array containing values to append to the string.
 */
template<typename T>
void appendVectorOrArrayToString(std::string *strOut, T values) {
    if (strOut == nullptr) return;
    for (size_t i = 0; i < values.size(); i++) {
        (*strOut) += std::to_string(values[i]);
        if (i != values.size() - 1) {
            (*strOut) += ", ";
        }
    }
}

/*
 * Append a key and a vector or an array of values to a string.
 *
 * strOut is the string to append a key and values to.
 * key is the name of the data.
 * values is a vector or an array containing values to append to the string.
 */
template<typename T>
void appendVectorOrArrayToString(std::string *strOut, const char* key, T values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    appendVectorOrArrayToString(strOut, values);
    (*strOut) += "\n";
}

/*
 * Append a key and a vector of arrays to a string.
 *
 * strOut is the string to append a key and values to.
 * key is the name of the data.
 * values is a vector of arrays containing values to append to the string.
 */
template<typename T, size_t SIZE>
void appendVectorArrayToString(std::string *strOut, const char* key,
            std::vector<std::array<T, SIZE>> values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    for (size_t i = 0; i < values.size(); i++) {
        appendVectorOrArrayToString(strOut, values[i]);
        if (i != values.size() - 1) {
            (*strOut) += ", ";
        }
    }
    (*strOut) += "\n";
}

/*
 * Append a key and an array of arrays to a string.
 *
 * strOut is the string to append a key and values to.
 * key is the name of the data.
 * values is an array of arrays containing values to append to the string.
 */
template<typename T, size_t SIZE>
void appendArrayArrayToString(std::string *strOut, const char* key,
            std::array<T, SIZE> values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    for (size_t i = 0; i < values.size(); i++) {
        appendVectorOrArrayToString(strOut, values[i]);
        if (i != values.size() - 1) {
            (*strOut) += ", ";
        }
    }
    (*strOut) += "\n";
}

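/*
 * Illustrative note (not part of the API): with key "aeRegions" and two 5-element arrays
 * {0, 0, 4032, 3024, 1} and {100, 100, 500, 500, 0}, appendVectorArrayToString() appends
 *
 *   aeRegions: 0, 0, 4032, 3024, 1, 100, 100, 500, 500, 0
 *
 * followed by a newline; the arrays are flattened and separated by ", " on a single line.
 */
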
} // namespace metadatautils

} // namespace pbcamera

#endif // HDR_PLUS_TYPES_H