/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.apksig.internal.util;

import com.android.apksig.internal.zip.ZipUtils;
import com.android.apksig.util.DataSink;
import com.android.apksig.util.DataSource;
import com.android.apksig.util.DataSources;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;

/**
 * VerityTreeBuilder is used to generate the root hash of the verity tree built from the input
 * file.  The root hash can be used on device for on-access verification.  The tree itself is
 * reproducible on device, and is not shipped with the APK.
 */
public class VerityTreeBuilder {

    /** Maximum size (in bytes) of each node of the tree. */
    private static final int CHUNK_SIZE = 4096;

    /** Digest algorithm (JCA Digest algorithm name) used in the tree. */
    private static final String JCA_ALGORITHM = "SHA-256";

    /** Optional salt to apply before each digestion. */
    private final byte[] mSalt;

    private final MessageDigest mMd;

    public VerityTreeBuilder(byte[] salt) throws NoSuchAlgorithmException {
        mSalt = salt;
        mMd = MessageDigest.getInstance(JCA_ALGORITHM);
    }
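
    // Illustrative usage sketch, not part of the original class: how the builder might be driven
    // from in-memory data.  The buffer name below is hypothetical; real callers wrap the APK
    // sections as DataSources.
    //
    //     VerityTreeBuilder builder = new VerityTreeBuilder(null /* no salt */);
    //     DataSource apkData = DataSources.asDataSource(ByteBuffer.wrap(apkBytes));
    //     byte[] rootHash = builder.generateVerityTreeRootHash(apkData);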

    /**
     * Returns the root hash of the APK verity tree built from ZIP blocks.
     *
     * Specifically, the verity tree is built over the APK as if the APK Signing Block (which must
     * be page aligned) did not exist, with the "Central Directory offset" field in the End of
     * Central Directory record adjusted to point to the offset at which the APK Signing Block
     * starts.
     */
    public byte[] generateVerityTreeRootHash(DataSource beforeApkSigningBlock,
            DataSource centralDir, DataSource eocd) throws IOException {
        if (beforeApkSigningBlock.size() % CHUNK_SIZE != 0) {
            throw new IllegalStateException("Size of data before APK Signing Block is not a "
                    + "multiple of " + CHUNK_SIZE + ": " + beforeApkSigningBlock.size());
        }

        // Ensure that, when digesting, the ZIP End of Central Directory record's Central Directory
        // offset field is treated as pointing to the offset at which the APK Signing Block will
        // start.
        long centralDirOffsetForDigesting = beforeApkSigningBlock.size();
        ByteBuffer eocdBuf = ByteBuffer.allocate((int) eocd.size());
        eocdBuf.order(ByteOrder.LITTLE_ENDIAN);
        eocd.copyTo(0, (int) eocd.size(), eocdBuf);
        eocdBuf.flip();
        ZipUtils.setZipEocdCentralDirectoryOffset(eocdBuf, centralDirOffsetForDigesting);

        return generateVerityTreeRootHash(new ChainedDataSource(beforeApkSigningBlock, centralDir,
                    DataSources.asDataSource(eocdBuf)));
    }
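
    // Worked example with hypothetical offsets (not from the original source): if the ZIP
    // entries end at 0x10000 (page aligned), the APK Signing Block occupies [0x10000, 0x12000),
    // and the Central Directory starts at 0x12000, then the EOCD's "Central Directory offset"
    // field holds 0x12000 in the APK.  For digesting it is rewritten to 0x10000, i.e.
    // beforeApkSigningBlock.size(), so the root hash does not depend on the signing block's
    // size or contents.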

    /**
     * Returns the root hash of the verity tree built from the data source.
     *
     * The tree is built bottom up.  The bottom level has a 256-bit digest for each 4 KB block in
     * the input file.  If the level's total size is larger than 4 KB, that level is used as the
     * input and the same procedure is repeated, until a level fits within 4 KB.  If a salt is
     * given, it is fed to the digest before the actual data of each chunk.
     *
     * The returned root hash is calculated over the final (top) 4 KB level, again with the salt
     * applied.
     *
     * The tree is currently stored only in memory and is never written out.  Nevertheless, it
     * follows the actual on-disk verity tree format, and is supposed to be re-generated on device.
     *
     * This is package-private for testing purposes.
     */
    byte[] generateVerityTreeRootHash(DataSource fileSource) throws IOException {
        int digestSize = mMd.getDigestLength();

        // Calculate the summed area table of level sizes. In other words, this is the offset
        // table of each level, plus the offset of the next, non-existent level.
        int[] levelOffset = calculateLevelOffset(fileSource.size(), digestSize);

        ByteBuffer verityBuffer = ByteBuffer.allocate(levelOffset[levelOffset.length - 1]);

        // Generate the hash tree bottom-up.
        for (int i = levelOffset.length - 2; i >= 0; i--) {
            DataSink middleBufferSink = new ByteBufferSink(
                    slice(verityBuffer, levelOffset[i], levelOffset[i + 1]));
            DataSource src;
            if (i == levelOffset.length - 2) {
                src = fileSource;
                digestDataByChunks(src, middleBufferSink);
            } else {
                src = DataSources.asDataSource(slice(verityBuffer.asReadOnlyBuffer(),
                            levelOffset[i + 1], levelOffset[i + 2]));
                digestDataByChunks(src, middleBufferSink);
            }

            // If the output does not fill a whole chunk, pad it with 0s.
            long totalOutput = divideRoundup(src.size(), CHUNK_SIZE) * digestSize;
            int incomplete = (int) (totalOutput % CHUNK_SIZE);
            if (incomplete > 0) {
                byte[] padding = new byte[CHUNK_SIZE - incomplete];
                middleBufferSink.consume(padding, 0, padding.length);
            }
        }

        // Finally, calculate the root hash from the top level (a single page).
        ByteBuffer firstPage = slice(verityBuffer.asReadOnlyBuffer(), 0, CHUNK_SIZE);
        return saltedDigest(firstPage);
    }
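
    // Digest-size arithmetic for the per-level zero padding above (illustrative numbers, not from
    // the original source): a 100 KiB input has 102400 / 4096 = 25 chunks, producing 25 * 32 = 800
    // bytes of SHA-256 digests for the bottom level; 4096 - 800 = 3296 zero bytes are appended so
    // the level occupies a whole 4 KB chunk.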

    /**
     * Returns the summed area table of level sizes in the verity tree.  In other words, the
     * returned array contains the offset of each level in the verity tree file format, plus an
     * additional entry for the offset of the next, non-existent level (i.e. one past the end of
     * the last level).  Thus the array length is the number of levels + 1.
     */
    private static int[] calculateLevelOffset(long dataSize, int digestSize) {
        // Compute the total size of each level, bottom to top.
        ArrayList<Long> levelSize = new ArrayList<>();
        while (true) {
            long chunkCount = divideRoundup(dataSize, CHUNK_SIZE);
            long size = CHUNK_SIZE * divideRoundup(chunkCount * digestSize, CHUNK_SIZE);
            levelSize.add(size);
            if (chunkCount * digestSize <= CHUNK_SIZE) {
                break;
            }
            dataSize = chunkCount * digestSize;
        }

        // Reverse and convert to a summed area table.
        int[] levelOffset = new int[levelSize.size() + 1];
        levelOffset[0] = 0;
        for (int i = 0; i < levelSize.size(); i++) {
            // We don't support a verity tree larger than Integer.MAX_VALUE.
            levelOffset[i + 1] = levelOffset[i] + Math.toIntExact(
                    levelSize.get(levelSize.size() - i - 1));
        }
        return levelOffset;
    }
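
    // Worked example (illustrative, not from the original source): for a 1 MiB input and 32-byte
    // SHA-256 digests, the bottom level takes ceil(256 * 32 / 4096.0) * 4096 = 8192 bytes and the
    // level above it takes 4096 bytes, so calculateLevelOffset(1048576, 32) returns
    // {0, 4096, 12288}: top level at [0, 4096), bottom level at [4096, 12288), tree size 12288.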

    /**
     * Digests the data source chunk by chunk and feeds each digest to the sink.  If the last chunk
     * is smaller than the chunk size, it is zero-padded up to the chunk size before digesting.
     */
    private void digestDataByChunks(DataSource dataSource, DataSink dataSink) throws IOException {
        long size = dataSource.size();
        long offset = 0;
        for (; offset + CHUNK_SIZE <= size; offset += CHUNK_SIZE) {
            ByteBuffer buffer = ByteBuffer.allocate(CHUNK_SIZE);
            dataSource.copyTo(offset, CHUNK_SIZE, buffer);
            buffer.rewind();
            byte[] hash = saltedDigest(buffer);
            dataSink.consume(hash, 0, hash.length);
        }

        // Digest the last incomplete chunk, zero-padded to the full chunk size, and send its
        // hash to the sink.
        int remaining = (int) (size % CHUNK_SIZE);
        if (remaining > 0) {
            ByteBuffer buffer = ByteBuffer.allocate(CHUNK_SIZE);  // zero-initialized.
            dataSource.copyTo(offset, remaining, buffer);
            buffer.rewind();

            byte[] hash = saltedDigest(buffer);
            dataSink.consume(hash, 0, hash.length);
        }
    }
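
    // Chunk arithmetic for digestDataByChunks above (illustrative numbers, not from the original
    // source): a 10000-byte source yields two full 4096-byte chunks plus a 1808-byte remainder
    // digested in a zero-initialized 4096-byte buffer, so three 32-byte digests (96 bytes total)
    // are fed to the sink.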

    /** Returns the digest of the data with the salt prepended. */
    private byte[] saltedDigest(ByteBuffer data) {
        mMd.reset();
        if (mSalt != null) {
            mMd.update(mSalt);
        }
        mMd.update(data);
        return mMd.digest();
    }

    /** Divides a number and rounds up to the nearest integer. */
    private static long divideRoundup(long dividend, long divisor) {
        return (dividend + divisor - 1) / divisor;
    }

    /** Returns a slice of the buffer that shares content with the original buffer. */
    private static ByteBuffer slice(ByteBuffer buffer, int begin, int end) {
        ByteBuffer b = buffer.duplicate();
        b.position(0);  // to ensure the position <= limit invariant.
        b.limit(end);
        b.position(begin);
        return b.slice();
    }
}