/*
 * Copyright 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef HDR_PLUS_TYPES_H
#define HDR_PLUS_TYPES_H

#include <array>
#include <stdint.h>
#include <string>
#include <vector>

namespace pbcamera {

// This file defines the common types used in HDR+ client and HDR+ service API.

typedef int32_t status_t;

/*
 * ImageConfiguration and PlaneConfiguration define the layout of a buffer.
 * The following is an example of an NV21 buffer.
 *
 * <-------Y stride (in bytes)------->
 * <----width (in pixels)---->
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  ^            ^
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  height       Y scanline
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  (in lines)   (in lines)
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  |            |
 * Y Y Y Y Y Y Y Y Y Y Y Y Y Y . . . .  v            |
 * . . . . . . . . . . . . . . . . . .               |
 * . . . . . . . . . . . . . . . . . .               v
 * <------V/U stride (in bytes)------>
 * V U V U V U V U V U V U V U . . . .  ^
 * V U V U V U V U V U V U V U . . . .  |
 * V U V U V U V U V U V U V U . . . .  |
 * V U V U V U V U V U V U V U . . . .  V/U scanline
 * V U V U V U V U V U V U V U . . . .  (in lines)
 * . . . . . . . . . . . . . . . . . .  |
 * . . . . . . . . . . . . . . . . . .  v
 * . . . . . . . . . . . . . . . . . .  -> Image padding.
 */

// PlaneConfiguration defines the configuration of an image plane.
struct PlaneConfiguration {
    // Number of bytes in each line including padding.
    uint32_t stride;
    // Number of lines vertically including padding.
    uint32_t scanline;

    PlaneConfiguration() : stride(0), scanline(0) {}

    bool operator==(const PlaneConfiguration &other) const {
        return stride == other.stride &&
               scanline == other.scanline;
    }

    bool operator!=(const PlaneConfiguration &other) const {
        return !(*this == other);
    }
};

// ImageConfiguration defines an image configuration.
struct ImageConfiguration {
    // Image width.
    uint32_t width;
    // Image height.
    uint32_t height;
    // Image format.
    int format;
    // Configuration for each plane.
    std::vector<PlaneConfiguration> planes;
    // Number of padded bytes after the last plane.
    uint32_t padding;

    ImageConfiguration() : width(0), height(0), format(0), padding(0) {}

    bool operator==(const ImageConfiguration &other) const {
        return width == other.width &&
               height == other.height &&
               format == other.format &&
               planes == other.planes &&
               padding == other.padding;
    }

    bool operator!=(const ImageConfiguration &other) const {
        return !(*this == other);
    }
};
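
/*
 * Example (illustrative sketch, not part of the API): filling out an ImageConfiguration for the
 * NV21 layout pictured above. The dimensions, the 64-byte stride alignment, and the use of the
 * Android pixel format constant HAL_PIXEL_FORMAT_YCrCb_420_SP (assumed to be visible from the
 * Android graphics headers) are illustrative assumptions only.
 *
 *   ImageConfiguration image;
 *   image.width = 1000;                          // Pixels per line.
 *   image.height = 760;                          // Lines in the Y plane.
 *   image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP; // NV21 in the Android pixel format enum.
 *
 *   PlaneConfiguration yPlane;
 *   yPlane.stride = 1024;                        // 1000 bytes of pixels, padded to 64-byte alignment.
 *   yPlane.scanline = 768;                       // 760 lines, padded to an assumed 16-line alignment.
 *
 *   PlaneConfiguration vuPlane;
 *   vuPlane.stride = 1024;                       // Interleaved V/U pairs; same byte stride as Y.
 *   vuPlane.scanline = 384;                      // Half the Y height for 4:2:0 chroma subsampling.
 *
 *   image.planes = {yPlane, vuPlane};
 *   image.padding = 0;                           // No trailing padding after the last plane.
 */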

/*
 * StreamConfiguration defines a stream's configuration, such as its image buffer resolution, used
 * during stream configuration.
 */
struct StreamConfiguration {
    /*
     * Unique ID of the stream. Each stream must have a unique ID so it can be used to identify
     * the output streams of a StreamBuffer in a CaptureRequest.
     */
    uint32_t id;

    // Image configuration.
    ImageConfiguration image;

    bool operator==(const StreamConfiguration &other) const {
        return id == other.id &&
               image == other.image;
    }

    bool operator!=(const StreamConfiguration &other) const {
        return !(*this == other);
    }
};

/*
 * SensorMode contains the sensor mode information.
 */
struct SensorMode {
    // Usually 0 is back camera and 1 is front camera.
    uint32_t cameraId;

    // Pixel array resolution.
    uint32_t pixelArrayWidth;
    uint32_t pixelArrayHeight;

    // Active array resolution.
    uint32_t activeArrayWidth;
    uint32_t activeArrayHeight;

    // Sensor output pixel clock.
    uint32_t outputPixelClkHz;

    // Sensor timestamp offset due to gyro calibration. When comparing timestamps between AP and
    // Easel, this offset should be subtracted from AP timestamp.
    int64_t timestampOffsetNs;

    // Sensor timestamp offset due to sensor cropping. When comparing timestamps between AP and
    // Easel, this offset should be subtracted from AP timestamp.
    int64_t timestampCropOffsetNs;

    // Sensor output format as defined in android_pixel_format.
    int format;

    SensorMode() : cameraId(0), pixelArrayWidth(0), pixelArrayHeight(0), activeArrayWidth(0),
                   activeArrayHeight(0), outputPixelClkHz(0), timestampOffsetNs(0),
                   timestampCropOffsetNs(0), format(0) {}
};

/*
 * InputConfiguration defines the input configuration for HDR+ service.
 */
struct InputConfiguration {
    // Whether the input frames come from sensor MIPI or AP. If true, HDR+ service will get input
    // frames from sensor and sensorMode contains the sensor mode information. If false, HDR+
    // service will get input frames from AP and streamConfig contains the input stream
    // configuration.
    bool isSensorInput;
    // Sensor mode if isSensorInput is true.
    SensorMode sensorMode;
    // Input stream configuration if isSensorInput is false.
    StreamConfiguration streamConfig;

    InputConfiguration() : isSensorInput(false) {}
};
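
/*
 * Example (illustrative only, not part of the API): the two ways an InputConfiguration is
 * typically filled, depending on where input frames come from. All field values below are
 * assumptions for illustration.
 *
 *   // Input frames streamed directly from the sensor over MIPI.
 *   InputConfiguration sensorInput;
 *   sensorInput.isSensorInput = true;
 *   sensorInput.sensorMode.cameraId = 0;            // Back camera, by the convention above.
 *   sensorInput.sensorMode.pixelArrayWidth = 4048;  // Hypothetical sensor dimensions.
 *   sensorInput.sensorMode.pixelArrayHeight = 3044;
 *
 *   // Input frames provided by the AP through a configured input stream.
 *   InputConfiguration apInput;
 *   apInput.isSensorInput = false;
 *   apInput.streamConfig.id = 0;                    // Must match the configured input stream ID.
 *   apInput.streamConfig.image.width = 4032;
 *   apInput.streamConfig.image.height = 3024;
 */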

/*
 * StreamBuffer defines a buffer in a stream.
 */
struct StreamBuffer {
    // ID of the stream that this buffer belongs to.
    uint32_t streamId;
    // DMA buffer fd for this buffer if it's an ION buffer.
    int32_t dmaBufFd;
    // Pointer to the data of this buffer.
    void* data;
    // Size of the allocated data.
    uint32_t dataSize;
};

/*
 * CaptureRequest defines a capture request that HDR+ client sends to HDR+ service.
 */
struct CaptureRequest {
    /*
     * ID of the capture request. Each capture request must have a unique ID. When HDR+ service
     * sends a CaptureResult to HDR+ client for this request, CaptureResult.requestId will be
     * set to this ID.
     */
    uint32_t id;
    /*
     * Output buffers of the request. The buffers will be filled with the captured image when
     * HDR+ service sends the output buffers in CaptureResult.
     */
    std::vector<StreamBuffer> outputBuffers;
};
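
/*
 * Example (illustrative sketch, not part of the API): building a CaptureRequest with one output
 * buffer. The buffer is assumed to have been allocated elsewhere (e.g. as an ION buffer) for a
 * stream configured with ID 1; ionBufferFd, mappedAddress, bufferSizeBytes, and nextRequestId are
 * hypothetical names introduced only for this example.
 *
 *   StreamBuffer buffer;
 *   buffer.streamId = 1;                 // Output stream this buffer belongs to.
 *   buffer.dmaBufFd = ionBufferFd;       // fd of the previously allocated ION buffer.
 *   buffer.data = mappedAddress;         // CPU-visible mapping of the same buffer.
 *   buffer.dataSize = bufferSizeBytes;   // Allocated size in bytes.
 *
 *   CaptureRequest request;
 *   request.id = nextRequestId++;        // Must be unique per request.
 *   request.outputBuffers.push_back(buffer);
 */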

// Util functions used in StaticMetadata and FrameMetadata.
namespace metadatautils {
template<typename T>
void appendValueToString(std::string *strOut, const char* key, T value);

template<typename T>
void appendVectorOrArrayToString(std::string *strOut, T values);

template<typename T>
void appendVectorOrArrayToString(std::string *strOut, const char* key, T values);

template<typename T, size_t SIZE>
void appendVectorArrayToString(std::string *strOut, const char* key,
            std::vector<std::array<T, SIZE>> values);

template<typename T, size_t SIZE>
void appendArrayArrayToString(std::string *strOut, const char* key,
            std::array<T, SIZE> values);
} // namespace metadatautils

static const uint32_t DEBUG_PARAM_NONE                      = 0u;
static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_METERING = (1u << 0);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD  = (1u << 1);
static const uint32_t DEBUG_PARAM_SAVE_GCAME_TEXT           = (1u << 2);
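
/*
 * Example (illustrative only): the flags above are bit masks, so they can be combined with
 * bitwise OR and later stored in StaticMetadata::debugParams and tested individually.
 *
 *   uint32_t debugParams = DEBUG_PARAM_SAVE_GCAME_INPUT_METERING |
 *                          DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD;
 *   bool savePayload = (debugParams & DEBUG_PARAM_SAVE_GCAME_INPUT_PAYLOAD) != 0;
 */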

/*
 * StaticMetadata defines a camera device's characteristics.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusService and deserialization in
 * MessengerListenerFromHdrPlusClient should also be updated.
 */
struct StaticMetadata {
    // The following are from Android Camera Metadata
    uint8_t flashInfoAvailable; // android.flash.info.available
    std::array<int32_t, 2> sensitivityRange; // android.sensor.info.sensitivityRange
    int32_t maxAnalogSensitivity; // android.sensor.maxAnalogSensitivity
    std::array<int32_t, 2> pixelArraySize; // android.sensor.info.pixelArraySize
    std::array<int32_t, 4> activeArraySize; // android.sensor.info.activeArraySize
    std::vector<std::array<int32_t, 4>> opticalBlackRegions; // android.sensor.opticalBlackRegions
    // android.scaler.availableStreamConfigurations
    std::vector<std::array<int32_t, 4>> availableStreamConfigurations;
    uint8_t referenceIlluminant1; // android.sensor.referenceIlluminant1
    uint8_t referenceIlluminant2; // android.sensor.referenceIlluminant2
    std::array<float, 9> calibrationTransform1; // android.sensor.calibrationTransform1
    std::array<float, 9> calibrationTransform2; // android.sensor.calibrationTransform2
    std::array<float, 9> colorTransform1; // android.sensor.colorTransform1
    std::array<float, 9> colorTransform2; // android.sensor.colorTransform2
    int32_t whiteLevel; // android.sensor.info.whiteLevel
    uint8_t colorFilterArrangement; // android.sensor.info.colorFilterArrangement
    std::vector<float> availableApertures; // android.lens.info.availableApertures
    std::vector<float> availableFocalLengths; // android.lens.info.availableFocalLengths
    std::array<int32_t, 2> shadingMapSize; // android.lens.info.shadingMapSize
    uint8_t focusDistanceCalibration; // android.lens.info.focusDistanceCalibration
    std::array<int32_t, 2> aeCompensationRange; // android.control.aeCompensationRange
    float aeCompensationStep; // android.control.aeCompensationStep
    uint32_t debugParams; // Use DEBUG_PARAM_*

    // Convert this static metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;

        metadatautils::appendValueToString(strOut, "flashInfoAvailable", flashInfoAvailable);
        metadatautils::appendVectorOrArrayToString(strOut, "sensitivityRange", sensitivityRange);
        metadatautils::appendValueToString(strOut, "maxAnalogSensitivity", maxAnalogSensitivity);
        metadatautils::appendVectorOrArrayToString(strOut, "pixelArraySize", pixelArraySize);
        metadatautils::appendVectorOrArrayToString(strOut, "activeArraySize", activeArraySize);
        metadatautils::appendVectorArrayToString(strOut, "opticalBlackRegions",
                opticalBlackRegions);
        metadatautils::appendVectorArrayToString(strOut, "availableStreamConfigurations",
                availableStreamConfigurations);
        metadatautils::appendValueToString(strOut, "referenceIlluminant1", referenceIlluminant1);
        metadatautils::appendValueToString(strOut, "referenceIlluminant2", referenceIlluminant2);
        metadatautils::appendVectorOrArrayToString(strOut, "calibrationTransform1",
                calibrationTransform1);
        metadatautils::appendVectorOrArrayToString(strOut, "calibrationTransform2",
                calibrationTransform2);
        metadatautils::appendVectorOrArrayToString(strOut, "colorTransform1", colorTransform1);
        metadatautils::appendVectorOrArrayToString(strOut, "colorTransform2", colorTransform2);
        metadatautils::appendValueToString(strOut, "whiteLevel", whiteLevel);
        metadatautils::appendValueToString(strOut, "colorFilterArrangement",
                colorFilterArrangement);
        metadatautils::appendVectorOrArrayToString(strOut, "availableApertures",
                availableApertures);
        metadatautils::appendVectorOrArrayToString(strOut, "availableFocalLengths",
                availableFocalLengths);
        metadatautils::appendVectorOrArrayToString(strOut, "shadingMapSize", shadingMapSize);
        metadatautils::appendValueToString(strOut, "focusDistanceCalibration",
                focusDistanceCalibration);
        metadatautils::appendVectorOrArrayToString(strOut, "aeCompensationRange",
                aeCompensationRange);
        metadatautils::appendValueToString(strOut, "aeCompensationStep",
                aeCompensationStep);
        metadatautils::appendValueToString(strOut, "debugParams", debugParams);
    }
};
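
/*
 * Example (illustrative only): appendToString() can be used to dump metadata for logging. The
 * ALOGV macro is just a placeholder sink here; any logging or debug output works.
 *
 *   StaticMetadata staticMetadata;       // Assume this was populated by the HDR+ client.
 *   std::string dump;
 *   staticMetadata.appendToString(&dump);
 *   ALOGV("%s", dump.c_str());
 */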

/*
 * FrameMetadata defines properties of a frame captured on AP.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusService and deserialization in
 * MessengerListenerFromHdrPlusClient should also be updated.
 */
struct FrameMetadata {
    int64_t easelTimestamp; // Easel timestamp

    // The following are from Android Camera Metadata
    int64_t exposureTime; // android.sensor.exposureTime
    int32_t sensitivity; // android.sensor.sensitivity
    int32_t postRawSensitivityBoost; // android.control.postRawSensitivityBoost
    uint8_t flashMode; // android.flash.mode
    std::array<float, 4> colorCorrectionGains; // android.colorCorrection.gains
    std::array<float, 9> colorCorrectionTransform; // android.colorCorrection.transform
    std::array<float, 3> neutralColorPoint; // android.sensor.neutralColorPoint
    int64_t timestamp; // android.sensor.timestamp
    uint8_t blackLevelLock; // android.blackLevel.lock
    uint8_t faceDetectMode; // android.statistics.faceDetectMode
    std::vector<int32_t> faceIds; // android.statistics.faceIds
    std::vector<std::array<int32_t, 6>> faceLandmarks; // android.statistics.faceLandmarks
    std::vector<std::array<int32_t, 4>> faceRectangles; // android.statistics.faceRectangles
    std::vector<uint8_t> faceScores; // android.statistics.faceScores
    uint8_t sceneFlicker; // android.statistics.sceneFlicker
    std::array<std::array<double, 2>, 4> noiseProfile; // android.sensor.noiseProfile
    std::array<float, 4> dynamicBlackLevel; // android.sensor.dynamicBlackLevel
    std::vector<float> lensShadingMap; // android.statistics.lensShadingMap
    float focusDistance; // android.lens.focusDistance
    int32_t aeExposureCompensation; // android.control.aeExposureCompensation
    uint8_t aeMode; // android.control.aeMode
    uint8_t aeLock; // android.control.aeLock
    uint8_t aeState; // android.control.aeState
    uint8_t aePrecaptureTrigger; // android.control.aePrecaptureTrigger
    std::vector<std::array<int32_t, 5>> aeRegions; // android.control.aeRegions

    // Convert this frame metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;

        metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
        metadatautils::appendValueToString(strOut, "exposureTime", exposureTime);
        metadatautils::appendValueToString(strOut, "sensitivity", sensitivity);
        metadatautils::appendValueToString(strOut, "postRawSensitivityBoost",
                postRawSensitivityBoost);
        metadatautils::appendValueToString(strOut, "flashMode", flashMode);
        metadatautils::appendVectorOrArrayToString(strOut, "colorCorrectionGains",
                colorCorrectionGains);
        metadatautils::appendVectorOrArrayToString(strOut, "colorCorrectionTransform",
                colorCorrectionTransform);
        metadatautils::appendVectorOrArrayToString(strOut, "neutralColorPoint", neutralColorPoint);
        metadatautils::appendValueToString(strOut, "timestamp", timestamp);
        metadatautils::appendValueToString(strOut, "blackLevelLock", blackLevelLock);
        metadatautils::appendValueToString(strOut, "faceDetectMode", faceDetectMode);
        metadatautils::appendVectorOrArrayToString(strOut, "faceIds", faceIds);
        metadatautils::appendVectorArrayToString(strOut, "faceLandmarks", faceLandmarks);
        metadatautils::appendVectorArrayToString(strOut, "faceRectangles", faceRectangles);
        metadatautils::appendVectorOrArrayToString(strOut, "faceScores", faceScores);
        metadatautils::appendArrayArrayToString(strOut, "noiseProfile", noiseProfile);
        metadatautils::appendValueToString(strOut, "sceneFlicker", sceneFlicker);
        metadatautils::appendVectorOrArrayToString(strOut, "dynamicBlackLevel", dynamicBlackLevel);
        metadatautils::appendVectorOrArrayToString(strOut, "lensShadingMap", lensShadingMap);
        metadatautils::appendValueToString(strOut, "focusDistance", focusDistance);
        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
        metadatautils::appendValueToString(strOut, "aeMode", aeMode);
        metadatautils::appendValueToString(strOut, "aeLock", aeLock);
        metadatautils::appendValueToString(strOut, "aeState", aeState);
        metadatautils::appendValueToString(strOut, "aePrecaptureTrigger", aePrecaptureTrigger);
        metadatautils::appendVectorArrayToString(strOut, "aeRegions", aeRegions);
    }
};

/*
 * RequestMetadata defines the properties for a capture request.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
 * MessengerListenerFromHdrPlusService should also be updated.
 */
struct RequestMetadata {
    std::array<int32_t, 4> cropRegion; // android.scaler.cropRegion (x_min, y_min, width, height)
    int32_t aeExposureCompensation; // android.control.aeExposureCompensation

    bool postviewEnable; // com.google.nexus.experimental2017.stats.postview_enable
    bool continuousCapturing; // Whether to capture RAW while HDR+ processing.

    // Convert this request metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendVectorOrArrayToString(strOut, "cropRegion", cropRegion);
        metadatautils::appendValueToString(strOut, "aeExposureCompensation", aeExposureCompensation);
        metadatautils::appendValueToString(strOut, "postviewEnable", postviewEnable);
        metadatautils::appendValueToString(strOut, "continuousCapturing", continuousCapturing);
    }
};

/*
 * ResultMetadata defines a processed frame's properties that have been modified due to processing.
 *
 * If this structure is changed, serialization in MessengerToHdrPlusClient and deserialization in
 * MessengerListenerFromHdrPlusService should also be updated.
 */
struct ResultMetadata {
    int64_t easelTimestamp; // Easel timestamp of SOF of the base frame.
    int64_t timestamp; // android.sensor.timestamp. AP timestamp of exposure start of the base
                       // frame.
    std::string makernote; // Obfuscated capture information.

    // Convert this result metadata to a string and append it to the specified string.
    void appendToString(std::string *strOut) const {
        if (strOut == nullptr) return;
        metadatautils::appendValueToString(strOut, "easelTimestamp", easelTimestamp);
        metadatautils::appendValueToString(strOut, "timestamp", timestamp);
        metadatautils::appendValueToString(strOut, "makernote", makernote.size());
    }
};

/*
 * CaptureResult defines a capture result that HDR+ service returns to HDR+ client.
 */
struct CaptureResult {
    /*
     * ID of the CaptureRequest that this capture result corresponds to. It can be used to match
     * the original CaptureRequest when the HDR+ client receives this result.
     */
    uint32_t requestId;
    /*
     * Output buffers filled with the processed frame by HDR+ service.
     */
    std::vector<StreamBuffer> outputBuffers;

    /*
     * Result metadata including modified properties due to processing.
     */
    ResultMetadata metadata;
};
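
/*
 * Example (illustrative sketch, not part of the API): matching a CaptureResult back to its pending
 * CaptureRequest on the client side. The pendingRequests map (which would require <map>) and the
 * callback name are assumptions introduced only for this example.
 *
 *   std::map<uint32_t, CaptureRequest> pendingRequests;
 *
 *   void onCaptureResult(const CaptureResult &result) {
 *       auto it = pendingRequests.find(result.requestId);
 *       if (it == pendingRequests.end()) return;   // Unknown or already-handled request.
 *       // result.outputBuffers now hold the processed frame for it->second.
 *       pendingRequests.erase(it);
 *   }
 */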

// Util functions used in StaticMetadata and FrameMetadata.
namespace metadatautils {

/*
 * Append a key and a value to a string.
 *
 * strOut is the string to append a key and a value to.
 * key is the name of the data.
 * value is the value of the data.
 */
template<typename T>
void appendValueToString(std::string *strOut, const char* key, T value) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": " + std::to_string(value) + "\n";
}
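
/*
 * For example (illustrative only), appending a single key/value pair:
 *
 *   std::string s;
 *   appendValueToString(&s, "sensitivity", 100);
 *   // s is now "sensitivity: 100\n"
 */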

/*
 * Append a vector or an array of values to a string.
 *
 * strOut is the string to append values to.
 * values is a vector or an array containing values to append to the string.
 */
template<typename T>
void appendVectorOrArrayToString(std::string *strOut, T values) {
    if (strOut == nullptr) return;
    for (size_t i = 0; i < values.size(); i++) {
        (*strOut) += std::to_string(values[i]);
        if (i != values.size() - 1) {
            (*strOut) += ", ";
        }
    }
}

/*
 * Append a key and a vector or an array of values to a string.
 *
 * strOut is the string to append a key and values to.
 * key is the name of the data.
 * values is a vector or an array containing values to append to the string.
 */
template<typename T>
void appendVectorOrArrayToString(std::string *strOut, const char* key, T values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    appendVectorOrArrayToString(strOut, values);
    (*strOut) += "\n";
}

/*
 * Append a key and a vector of arrays to a string.
 *
 * strOut is the string to append a key and values to.
 * key is the name of the data.
 * values is a vector of arrays containing values to append to the string.
 */
template<typename T, size_t SIZE>
void appendVectorArrayToString(std::string *strOut, const char* key,
            std::vector<std::array<T, SIZE>> values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    for (size_t i = 0; i < values.size(); i++) {
        appendVectorOrArrayToString(strOut, values[i]);
        if (i != values.size() - 1) {
            (*strOut) += ", ";
        }
    }
    (*strOut) += "\n";
}

/*
 * Append a key and an array of arrays to a string.
 *
 * strOut is the string to append a key and values to.
 * key is the name of the data.
 * values is an array of arrays containing values to append to the string.
 */
template<typename T, size_t SIZE>
void appendArrayArrayToString(std::string *strOut, const char* key,
            std::array<T, SIZE> values) {
    if (strOut == nullptr) return;
    (*strOut) += std::string(key) + ": ";
    for (size_t i = 0; i < values.size(); i++) {
        appendVectorOrArrayToString(strOut, values[i]);
        if (i != values.size() - 1) {
            (*strOut) += ", ";
        }
    }
    (*strOut) += "\n";
}

} // namespace metadatautils

} // namespace pbcamera

#endif // HDR_PLUS_TYPES_H